From 658e970ce0c817d0ec17277b1ed9072340f177aa Mon Sep 17 00:00:00 2001 From: Nico Melone Date: Wed, 30 Apr 2025 08:48:49 -0500 Subject: [PATCH] uncompyled files --- APPS_UNCOMPILED/lib/OpenOPC.py | 1157 ++++ APPS_UNCOMPILED/lib/Pyro4/__init__.py | 7 + APPS_UNCOMPILED/lib/Pyro4/configuration.py | 159 + APPS_UNCOMPILED/lib/Pyro4/constants.py | 17 + APPS_UNCOMPILED/lib/Pyro4/core.py | 1971 ++++++ APPS_UNCOMPILED/lib/Pyro4/errors.py | 51 + APPS_UNCOMPILED/lib/Pyro4/futures.py | 212 + APPS_UNCOMPILED/lib/Pyro4/message.py | 199 + APPS_UNCOMPILED/lib/Pyro4/naming.py | 7 + APPS_UNCOMPILED/lib/Pyro4/naming_storage.py | 496 ++ APPS_UNCOMPILED/lib/Pyro4/nsc.py | 163 + .../lib/Pyro4/socketserver/__init__.py | 13 + .../socketserver/existingconnectionserver.py | 121 + .../lib/Pyro4/socketserver/multiplexserver.py | 221 + .../lib/Pyro4/socketserver/threadpool.py | 142 + .../Pyro4/socketserver/threadpoolserver.py | 262 + APPS_UNCOMPILED/lib/Pyro4/socketutil.py | 641 ++ APPS_UNCOMPILED/lib/Pyro4/test/__init__.py | 8 + APPS_UNCOMPILED/lib/Pyro4/test/echoserver.py | 208 + APPS_UNCOMPILED/lib/Pyro4/util.py | 976 +++ APPS_UNCOMPILED/lib/Pyro4/utils/__init__.py | 8 + APPS_UNCOMPILED/lib/Pyro4/utils/flame.py | 305 + .../lib/Pyro4/utils/flameserver.py | 68 + .../lib/Pyro4/utils/httpgateway.py | 276 + APPS_UNCOMPILED/lib/autologging.py | 1209 ++++ .../lib/azure/iot/device/__init__.py | 21 + .../lib/azure/iot/device/aio/__init__.py | 18 + .../iot/device/aio/patch_documentation.py | 189 + .../lib/azure/iot/device/common/__init__.py | 16 + .../azure/iot/device/common/async_adapter.py | 77 + .../azure/iot/device/common/asyncio_compat.py | 79 + .../azure/iot/device/common/auth/__init__.py | 8 + .../device/common/auth/connection_string.py | 93 + .../azure/iot/device/common/auth/sastoken.py | 130 + .../device/common/auth/signing_mechanism.py | 63 + .../iot/device/common/callable_weak_method.py | 29 + .../iot/device/common/chainable_exception.py | 20 + .../iot/device/common/evented_callback.py | 
58 + .../iot/device/common/handle_exceptions.py | 54 + .../azure/iot/device/common/http_transport.py | 104 + .../iot/device/common/models/__init__.py | 13 + .../iot/device/common/models/proxy_options.py | 49 + .../azure/iot/device/common/models/x509.py | 37 + .../azure/iot/device/common/mqtt_transport.py | 513 ++ .../iot/device/common/pipeline/__init__.py | 17 + .../iot/device/common/pipeline/config.py | 41 + .../common/pipeline/pipeline_events_base.py | 38 + .../common/pipeline/pipeline_events_mqtt.py | 22 + .../common/pipeline/pipeline_exceptions.py | 28 + .../common/pipeline/pipeline_ops_base.py | 273 + .../common/pipeline/pipeline_ops_http.py | 33 + .../common/pipeline/pipeline_ops_mqtt.py | 65 + .../common/pipeline/pipeline_stages_base.py | 685 ++ .../common/pipeline/pipeline_stages_http.py | 67 + .../common/pipeline/pipeline_stages_mqtt.py | 151 + .../device/common/pipeline/pipeline_thread.py | 134 + .../iot/device/common/transport_exceptions.py | 32 + .../azure/iot/device/common/version_compat.py | 31 + .../lib/azure/iot/device/constant.py | 20 + .../lib/azure/iot/device/exceptions.py | 32 + .../lib/azure/iot/device/iothub/__init__.py | 16 + .../iot/device/iothub/abstract_clients.py | 621 ++ .../azure/iot/device/iothub/aio/__init__.py | 15 + .../iot/device/iothub/aio/async_clients.py | 583 ++ .../iothub/aio/async_handler_manager.py | 115 + .../iot/device/iothub/aio/async_inbox.py | 69 + .../iot/device/iothub/aio/loop_management.py | 57 + .../lib/azure/iot/device/iothub/edge_hsm.py | 164 + .../azure/iot/device/iothub/inbox_manager.py | 163 + .../iot/device/iothub/models/__init__.py | 13 + .../azure/iot/device/iothub/models/message.py | 58 + .../azure/iot/device/iothub/models/methods.py | 62 + .../azure/iot/device/iothub/models/twin.py | 18 + .../iot/device/iothub/pipeline/__init__.py | 16 + .../iot/device/iothub/pipeline/config.py | 30 + .../iot/device/iothub/pipeline/constant.py | 14 + .../iot/device/iothub/pipeline/exceptions.py | 10 + 
.../device/iothub/pipeline/http_map_error.py | 61 + .../iothub/pipeline/http_path_iothub.py | 40 + .../device/iothub/pipeline/http_pipeline.py | 115 + .../device/iothub/pipeline/mqtt_pipeline.py | 308 + .../iothub/pipeline/mqtt_topic_iothub.py | 342 + .../iothub/pipeline/pipeline_events_iothub.py | 50 + .../iothub/pipeline/pipeline_ops_iothub.py | 83 + .../pipeline/pipeline_ops_iothub_http.py | 72 + .../iothub/pipeline/pipeline_stages_iothub.py | 128 + .../pipeline/pipeline_stages_iothub_http.py | 114 + .../pipeline/pipeline_stages_iothub_mqtt.py | 149 + .../azure/iot/device/iothub/sync_clients.py | 627 ++ .../iot/device/iothub/sync_handler_manager.py | 197 + .../lib/azure/iot/device/iothub/sync_inbox.py | 119 + APPS_UNCOMPILED/lib/azure/iot/device/patch.py | 88 + .../azure/iot/device/patch_documentation.py | 189 + .../azure/iot/device/provisioning/__init__.py | 17 + .../abstract_provisioning_device_client.py | 197 + .../iot/device/provisioning/aio/__init__.py | 15 + .../aio/async_provisioning_device_client.py | 103 + .../device/provisioning/models/__init__.py | 12 + .../models/registration_result.py | 93 + .../device/provisioning/pipeline/__init__.py | 15 + .../device/provisioning/pipeline/config.py | 26 + .../device/provisioning/pipeline/constant.py | 17 + .../provisioning/pipeline/exceptions.py | 10 + .../provisioning/pipeline/mqtt_pipeline.py | 128 + .../pipeline/mqtt_topic_provisioning.py | 99 + .../pipeline/pipeline_ops_provisioning.py | 51 + .../pipeline/pipeline_stages_provisioning.py | 291 + .../pipeline_stages_provisioning_mqtt.py | 103 + .../provisioning_device_client.py | 108 + .../lib/azure/iot/device/user_agent.py | 38 + APPS_UNCOMPILED/lib/chardet/__init__.py | 62 + APPS_UNCOMPILED/lib/chardet/big5freq.py | 558 ++ APPS_UNCOMPILED/lib/chardet/big5prober.py | 27 + .../lib/chardet/chardistribution.py | 163 + .../lib/chardet/charsetgroupprober.py | 89 + APPS_UNCOMPILED/lib/chardet/charsetprober.py | 99 + APPS_UNCOMPILED/lib/chardet/cli/__init__.py | 8 + 
APPS_UNCOMPILED/lib/chardet/cli/chardetect.py | 77 + .../lib/chardet/codingstatemachine.py | 43 + APPS_UNCOMPILED/lib/chardet/compat.py | 20 + APPS_UNCOMPILED/lib/chardet/cp949prober.py | 27 + APPS_UNCOMPILED/lib/chardet/enums.py | 66 + APPS_UNCOMPILED/lib/chardet/escprober.py | 76 + APPS_UNCOMPILED/lib/chardet/escsm.py | 119 + APPS_UNCOMPILED/lib/chardet/eucjpprober.py | 66 + APPS_UNCOMPILED/lib/chardet/euckrfreq.py | 245 + APPS_UNCOMPILED/lib/chardet/euckrprober.py | 27 + APPS_UNCOMPILED/lib/chardet/euctwfreq.py | 575 ++ APPS_UNCOMPILED/lib/chardet/euctwprober.py | 27 + APPS_UNCOMPILED/lib/chardet/gb2312freq.py | 405 ++ APPS_UNCOMPILED/lib/chardet/gb2312prober.py | 27 + APPS_UNCOMPILED/lib/chardet/hebrewprober.py | 101 + APPS_UNCOMPILED/lib/chardet/jisfreq.py | 442 ++ APPS_UNCOMPILED/lib/chardet/jpcntx.py | 180 + .../lib/chardet/langbulgarianmodel.py | 4566 +++++++++++++ APPS_UNCOMPILED/lib/chardet/langgreekmodel.py | 4316 ++++++++++++ .../lib/chardet/langhebrewmodel.py | 4303 ++++++++++++ .../lib/chardet/langhungarianmodel.py | 4566 +++++++++++++ .../lib/chardet/langrussianmodel.py | 5618 ++++++++++++++++ APPS_UNCOMPILED/lib/chardet/langthaimodel.py | 4303 ++++++++++++ .../lib/chardet/langturkishmodel.py | 4303 ++++++++++++ APPS_UNCOMPILED/lib/chardet/latin1prober.py | 104 + .../lib/chardet/mbcharsetprober.py | 63 + .../lib/chardet/mbcsgroupprober.py | 31 + APPS_UNCOMPILED/lib/chardet/mbcssm.py | 280 + .../lib/chardet/metadata/__init__.py | 7 + .../lib/chardet/metadata/languages.py | 272 + .../lib/chardet/sbcharsetprober.py | 100 + .../lib/chardet/sbcsgroupprober.py | 42 + APPS_UNCOMPILED/lib/chardet/sjisprober.py | 66 + .../lib/chardet/universaldetector.py | 204 + APPS_UNCOMPILED/lib/chardet/utf8prober.py | 57 + APPS_UNCOMPILED/lib/chardet/version.py | 15 + APPS_UNCOMPILED/lib/deprecation.py | 176 + APPS_UNCOMPILED/lib/h2/__init__.py | 14 + APPS_UNCOMPILED/lib/h2/config.py | 80 + APPS_UNCOMPILED/lib/h2/connection.py | 1688 +++++ APPS_UNCOMPILED/lib/h2/errors.py 
| 72 + APPS_UNCOMPILED/lib/h2/events.py | 277 + APPS_UNCOMPILED/lib/h2/exceptions.py | 106 + APPS_UNCOMPILED/lib/h2/frame_buffer.py | 135 + APPS_UNCOMPILED/lib/h2/settings.py | 237 + APPS_UNCOMPILED/lib/h2/stream.py | 1196 ++++ APPS_UNCOMPILED/lib/h2/utilities.py | 430 ++ APPS_UNCOMPILED/lib/h2/windows.py | 123 + APPS_UNCOMPILED/lib/hpack/__init__.py | 22 + APPS_UNCOMPILED/lib/hpack/compat.py | 52 + APPS_UNCOMPILED/lib/hpack/exceptions.py | 32 + APPS_UNCOMPILED/lib/hpack/hpack.py | 443 ++ APPS_UNCOMPILED/lib/hpack/hpack_compat.py | 101 + APPS_UNCOMPILED/lib/hpack/huffman.py | 51 + .../lib/hpack/huffman_constants.py | 305 + APPS_UNCOMPILED/lib/hpack/huffman_table.py | 7 + APPS_UNCOMPILED/lib/hpack/struct.py | 27 + APPS_UNCOMPILED/lib/hpack/table.py | 162 + APPS_UNCOMPILED/lib/jsonpath.py | 297 + APPS_UNCOMPILED/lib/jwt/__init__.py | 21 + APPS_UNCOMPILED/lib/jwt/__main__.py | 111 + APPS_UNCOMPILED/lib/jwt/algorithms.py | 316 + APPS_UNCOMPILED/lib/jwt/api_jws.py | 186 + APPS_UNCOMPILED/lib/jwt/api_jwt.py | 170 + APPS_UNCOMPILED/lib/jwt/compat.py | 66 + APPS_UNCOMPILED/lib/jwt/contrib/__init__.py | 7 + .../lib/jwt/contrib/algorithms/__init__.py | 7 + .../lib/jwt/contrib/algorithms/py_ecdsa.py | 43 + .../lib/jwt/contrib/algorithms/pycrypto.py | 38 + APPS_UNCOMPILED/lib/jwt/exceptions.py | 65 + APPS_UNCOMPILED/lib/jwt/help.py | 59 + APPS_UNCOMPILED/lib/jwt/utils.py | 101 + APPS_UNCOMPILED/lib/libevent.py | 44 + APPS_UNCOMPILED/lib/linkkit/__init__.py | 8 + APPS_UNCOMPILED/lib/linkkit/h2client.py | 496 ++ APPS_UNCOMPILED/lib/linkkit/linkkit.py | 1964 ++++++ APPS_UNCOMPILED/lib/linksdktest/__init__.py | 7 + .../lib/linksdktest/common/__init__.py | 18 + .../lib/linksdktest/common/method.py | 7 + APPS_UNCOMPILED/lib/linksdktest/common/var.py | 52 + APPS_UNCOMPILED/lib/mcprotocol/__init__.py | 16 + APPS_UNCOMPILED/lib/mcprotocol/comfx.py | 313 + APPS_UNCOMPILED/lib/mcprotocol/mcconst.py | 566 ++ APPS_UNCOMPILED/lib/mcprotocol/mcerror.py | 27 + 
APPS_UNCOMPILED/lib/mcprotocol/mcformat.py | 320 + APPS_UNCOMPILED/lib/mcprotocol/type1c.py | 241 + APPS_UNCOMPILED/lib/mcprotocol/type3c.py | 308 + APPS_UNCOMPILED/lib/mcprotocol/type3e.py | 725 ++ APPS_UNCOMPILED/lib/mcprotocol/type4c.py | 31 + APPS_UNCOMPILED/lib/mcprotocol/type4e.py | 61 + APPS_UNCOMPILED/lib/opcua/__init__.py | 18 + APPS_UNCOMPILED/lib/opcua/compat.py | 14 + APPS_UNCOMPILED/lib/opcua/crypto/__init__.py | 7 + .../lib/opcua/crypto/security_policies.py | 501 ++ APPS_UNCOMPILED/lib/opcua/crypto/uacrypto.py | 197 + APPS_UNCOMPILED/lib/opcua/tools.py | 699 ++ APPS_UNCOMPILED/lib/opcua/ua/attribute_ids.py | 37 + APPS_UNCOMPILED/lib/opcua/ua/object_ids.py | 7 + .../lib/opcua/ua/uaprotocol_hand.py | 353 + APPS_UNCOMPILED/lib/packaging/__about__.py | 25 + APPS_UNCOMPILED/lib/packaging/__init__.py | 18 + APPS_UNCOMPILED/lib/packaging/_compat.py | 33 + APPS_UNCOMPILED/lib/packaging/_structures.py | 72 + APPS_UNCOMPILED/lib/packaging/_typing.py | 43 + APPS_UNCOMPILED/lib/packaging/markers.py | 237 + APPS_UNCOMPILED/lib/packaging/requirements.py | 108 + APPS_UNCOMPILED/lib/packaging/specifiers.py | 483 ++ APPS_UNCOMPILED/lib/packaging/tags.py | 658 ++ APPS_UNCOMPILED/lib/packaging/utils.py | 47 + APPS_UNCOMPILED/lib/packaging/version.py | 354 + APPS_UNCOMPILED/lib/paho/__init__.py | 7 + APPS_UNCOMPILED/lib/paho/mqtt/__init__.py | 11 + APPS_UNCOMPILED/lib/paho/mqtt/client.py | 3183 +++++++++ APPS_UNCOMPILED/lib/paho/mqtt/matcher.py | 91 + APPS_UNCOMPILED/lib/paho/mqtt/packettypes.py | 38 + APPS_UNCOMPILED/lib/paho/mqtt/properties.py | 455 ++ APPS_UNCOMPILED/lib/paho/mqtt/publish.py | 210 + APPS_UNCOMPILED/lib/paho/mqtt/reasoncodes.py | 193 + APPS_UNCOMPILED/lib/paho/mqtt/subscribe.py | 236 + .../lib/paho/mqtt/subscribeoptions.py | 93 + APPS_UNCOMPILED/lib/pycomm3/__init__.py | 18 + APPS_UNCOMPILED/lib/pycomm3/_version.py | 9 + APPS_UNCOMPILED/lib/pycomm3/bytes_.py | 135 + APPS_UNCOMPILED/lib/pycomm3/cip_base.py | 486 ++ APPS_UNCOMPILED/lib/pycomm3/clx.py | 
1181 ++++ APPS_UNCOMPILED/lib/pycomm3/clx_legacy.py | 822 +++ APPS_UNCOMPILED/lib/pycomm3/com_server.py | 55 + APPS_UNCOMPILED/lib/pycomm3/const.py | 1661 +++++ APPS_UNCOMPILED/lib/pycomm3/exceptions.py | 23 + APPS_UNCOMPILED/lib/pycomm3/map.py | 34 + .../lib/pycomm3/packets/__init__.py | 31 + .../lib/pycomm3/packets/requests.py | 661 ++ .../lib/pycomm3/packets/responses.py | 474 ++ APPS_UNCOMPILED/lib/pycomm3/slc.py | 321 + APPS_UNCOMPILED/lib/pycomm3/socket_.py | 63 + APPS_UNCOMPILED/lib/pycomm3/tag.py | 26 + APPS_UNCOMPILED/lib/pyparsing.py | 5963 +++++++++++++++++ APPS_UNCOMPILED/lib/pyrsistent/__init__.py | 27 + .../lib/pyrsistent/_checked_types.py | 439 ++ APPS_UNCOMPILED/lib/pyrsistent/_compat.py | 20 + .../lib/pyrsistent/_field_common.py | 289 + APPS_UNCOMPILED/lib/pyrsistent/_helpers.py | 89 + APPS_UNCOMPILED/lib/pyrsistent/_immutable.py | 85 + APPS_UNCOMPILED/lib/pyrsistent/_pbag.py | 251 + APPS_UNCOMPILED/lib/pyrsistent/_pclass.py | 229 + APPS_UNCOMPILED/lib/pyrsistent/_pdeque.py | 326 + APPS_UNCOMPILED/lib/pyrsistent/_plist.py | 273 + APPS_UNCOMPILED/lib/pyrsistent/_pmap.py | 416 ++ APPS_UNCOMPILED/lib/pyrsistent/_precord.py | 134 + APPS_UNCOMPILED/lib/pyrsistent/_pset.py | 200 + APPS_UNCOMPILED/lib/pyrsistent/_pvector.py | 649 ++ APPS_UNCOMPILED/lib/pyrsistent/_toolz.py | 88 + .../lib/pyrsistent/_transformations.py | 130 + APPS_UNCOMPILED/lib/pyrsistent/typing.py | 94 + APPS_UNCOMPILED/lib/serpent.py | 506 ++ APPS_UNCOMPILED/lib/sftpFunc.py | 105 + APPS_UNCOMPILED/lib/six.py | 914 +++ APPS_UNCOMPILED/lib/snap7/__init__.py | 22 + APPS_UNCOMPILED/lib/snap7/bin/__init__.py | 8 + APPS_UNCOMPILED/lib/snap7/bin/snap7-server.py | 44 + APPS_UNCOMPILED/lib/snap7/client.py | 592 ++ APPS_UNCOMPILED/lib/snap7/common.py | 112 + APPS_UNCOMPILED/lib/snap7/error.py | 101 + APPS_UNCOMPILED/lib/snap7/logo.py | 277 + APPS_UNCOMPILED/lib/snap7/partner.py | 218 + APPS_UNCOMPILED/lib/snap7/server.py | 279 + APPS_UNCOMPILED/lib/snap7/six.py | 721 ++ 
APPS_UNCOMPILED/lib/snap7/snap7exceptions.py | 11 + APPS_UNCOMPILED/lib/snap7/snap7types.py | 236 + APPS_UNCOMPILED/lib/snap7/util.py | 420 ++ APPS_UNCOMPILED/lib/socks.py | 685 ++ APPS_UNCOMPILED/lib/sockshandler.py | 133 + APPS_UNCOMPILED/lib/sparkPlugB_pb2.py | 1790 +++++ APPS_UNCOMPILED/lib/timeseries/__init__.py | 23 + .../lib/timeseries/clients/__init__.py | 9 + .../timeseries/clients/time_series_client.py | 108 + .../lib/timeseries/models/__init__.py | 22 + .../lib/timeseries/models/badrequest.py | 102 + .../models/delete_timeseries_request.py | 146 + .../lib/timeseries/models/error.py | 102 + .../models/get_timeseries_request.py | 209 + .../lib/timeseries/models/notfound.py | 102 + .../models/put_timeseries_request.py | 124 + .../lib/timeseries/models/timeseries.py | 104 + .../lib/timeseries/models/toomanyrequests.py | 102 + .../lib/timeseries/models/unauthorized.py | 102 + APPS_UNCOMPILED/lib/typing_extensions.py | 2116 ++++++ APPS_UNCOMPILED/lib/zipp.py | 240 + APPS_UNCOMPILED/src/__init__.py | 7 + APPS_UNCOMPILED/src/adapter/__init__.py | 15 + APPS_UNCOMPILED/src/adapter/opcua_server.py | 208 + APPS_UNCOMPILED/src/common/AlarmPolicy.py | 49 + APPS_UNCOMPILED/src/common/CloudType.py | 17 + APPS_UNCOMPILED/src/common/ConfigMisc.py | 70 + APPS_UNCOMPILED/src/common/ConfigParser.py | 143 + APPS_UNCOMPILED/src/common/Constant.py | 106 + APPS_UNCOMPILED/src/common/Controller.py | 91 + APPS_UNCOMPILED/src/common/DataType.py | 85 + APPS_UNCOMPILED/src/common/ErlangAPI.py | 122 + APPS_UNCOMPILED/src/common/Error.py | 46 + APPS_UNCOMPILED/src/common/InDB.py | 653 ++ APPS_UNCOMPILED/src/common/InternalPath.py | 35 + APPS_UNCOMPILED/src/common/InternalTopic.py | 65 + APPS_UNCOMPILED/src/common/LocalSetting.py | 24 + APPS_UNCOMPILED/src/common/Logger.py | 111 + APPS_UNCOMPILED/src/common/MQClient.py | 524 ++ APPS_UNCOMPILED/src/common/MeasureGroup.py | 33 + APPS_UNCOMPILED/src/common/MeasurePoint.py | 52 + APPS_UNCOMPILED/src/common/MobiusAPI.py | 121 + 
APPS_UNCOMPILED/src/common/Permission.py | 15 + APPS_UNCOMPILED/src/common/Protocol.py | 103 + APPS_UNCOMPILED/src/common/Renjie.py | 929 +++ APPS_UNCOMPILED/src/common/SendType.py | 15 + APPS_UNCOMPILED/src/common/ServiceID.py | 27 + APPS_UNCOMPILED/src/common/SouthConfig.py | 129 + APPS_UNCOMPILED/src/common/TriggerType.py | 18 + APPS_UNCOMPILED/src/common/Utilities.py | 154 + APPS_UNCOMPILED/src/common/__init__.py | 7 + .../__pycache__/InternalTopic.cpython-312.pyc | Bin 0 -> 4981 bytes .../common/__pycache__/Logger.cpython-312.pyc | Bin 0 -> 6631 bytes .../__pycache__/__init__.cpython-312.pyc | Bin 0 -> 185 bytes APPS_UNCOMPILED/src/drivers/Drivers.py | 430 ++ APPS_UNCOMPILED/src/drivers/EnIPDriver.py | 291 + APPS_UNCOMPILED/src/drivers/MCDriver.py | 519 ++ APPS_UNCOMPILED/src/drivers/OpcDaDriver.py | 711 ++ APPS_UNCOMPILED/src/drivers/OpcUaDriver.py | 527 ++ APPS_UNCOMPILED/src/drivers/S7Driver.py | 844 +++ APPS_UNCOMPILED/src/drivers/__init__.py | 15 + APPS_UNCOMPILED/src/drvr.py | 39 + APPS_UNCOMPILED/src/main.py | 42 + APPS_UNCOMPILED/src/master/AdaptConfig.py | 910 +++ APPS_UNCOMPILED/src/master/Config.py | 1182 ++++ APPS_UNCOMPILED/src/master/ConfigMerge.py | 62 + APPS_UNCOMPILED/src/master/ConfigSchema.py | 746 +++ APPS_UNCOMPILED/src/master/DriverTls.py | 124 + APPS_UNCOMPILED/src/master/Erlang.py | 736 ++ APPS_UNCOMPILED/src/master/Master.py | 568 ++ APPS_UNCOMPILED/src/master/Services.py | 7 + APPS_UNCOMPILED/src/master/Web.py | 5628 ++++++++++++++++ APPS_UNCOMPILED/src/master/__init__.py | 7 + APPS_UNCOMPILED/src/mindsphere.py | 42 + APPS_UNCOMPILED/src/mindsphere/__init__.py | 7 + .../src/mindsphere/mindspherePut.py | 88 + .../src/mindsphere/mindsphere_function.py | 127 + APPS_UNCOMPILED/src/opcuasvr.py | 27 + APPS_UNCOMPILED/src/qckfs.py | 46 + APPS_UNCOMPILED/src/quickfaas/LWTSDB.py | 61 + APPS_UNCOMPILED/src/quickfaas/LwTimer.py | 109 + APPS_UNCOMPILED/src/quickfaas/OpcuaMethod.py | 55 + APPS_UNCOMPILED/src/quickfaas/__init__.py | 18 + 
.../__pycache__/__init__.cpython-312.pyc | Bin 0 -> 472 bytes .../__pycache__/config.cpython-312.pyc | Bin 0 -> 1470 bytes .../__pycache__/controller.cpython-312.pyc | Bin 0 -> 3043 bytes .../quick_function.cpython-312.pyc | Bin 0 -> 7836 bytes .../__pycache__/service_id.cpython-312.pyc | Bin 0 -> 1452 bytes .../__pycache__/transport.cpython-312.pyc | Bin 0 -> 31661 bytes APPS_UNCOMPILED/src/quickfaas/aliyuniot.py | 334 + APPS_UNCOMPILED/src/quickfaas/awsiot.py | 455 ++ APPS_UNCOMPILED/src/quickfaas/azureiot.py | 151 + APPS_UNCOMPILED/src/quickfaas/clouds.py | 32 + APPS_UNCOMPILED/src/quickfaas/config.py | 34 + APPS_UNCOMPILED/src/quickfaas/controller.py | 86 + APPS_UNCOMPILED/src/quickfaas/ds1_wizard.py | 172 + APPS_UNCOMPILED/src/quickfaas/faas_handler.py | 855 +++ APPS_UNCOMPILED/src/quickfaas/file.py | 40 + APPS_UNCOMPILED/src/quickfaas/global_dict.py | 29 + APPS_UNCOMPILED/src/quickfaas/measure.py | 260 + APPS_UNCOMPILED/src/quickfaas/messagebus.py | 19 + .../src/quickfaas/mqttSparkPlugB.py | 327 + .../src/quickfaas/packet_handler.py | 45 + .../src/quickfaas/quick_function.py | 126 + APPS_UNCOMPILED/src/quickfaas/remotebus.py | 33 + APPS_UNCOMPILED/src/quickfaas/service_id.py | 38 + APPS_UNCOMPILED/src/quickfaas/transport.py | 525 ++ .../ba_facility/thingsboard/pub/sendData.py | 145 + .../thingsboard/sub/receiveCommands.py | 75 + Pub_Sub/ba_facility/thingsboard/tags.csv | 147 + .../thingsboard/cameratrailer_tb_v6.cfg | 2 +- Pub_Sub/rr_facility/thingsboard/alarm.csv | 2 + ..._facility.csv => rr_facility_measures.csv} | 636 +- code snippets/getPLCData.ipynb | 33 +- code snippets/tag_dump.json | 4208 ++++++++++-- 399 files changed, 127983 insertions(+), 1021 deletions(-) create mode 100644 APPS_UNCOMPILED/lib/OpenOPC.py create mode 100644 APPS_UNCOMPILED/lib/Pyro4/__init__.py create mode 100644 APPS_UNCOMPILED/lib/Pyro4/configuration.py create mode 100644 APPS_UNCOMPILED/lib/Pyro4/constants.py create mode 100644 APPS_UNCOMPILED/lib/Pyro4/core.py create mode 
100644 APPS_UNCOMPILED/lib/Pyro4/errors.py create mode 100644 APPS_UNCOMPILED/lib/Pyro4/futures.py create mode 100644 APPS_UNCOMPILED/lib/Pyro4/message.py create mode 100644 APPS_UNCOMPILED/lib/Pyro4/naming.py create mode 100644 APPS_UNCOMPILED/lib/Pyro4/naming_storage.py create mode 100644 APPS_UNCOMPILED/lib/Pyro4/nsc.py create mode 100644 APPS_UNCOMPILED/lib/Pyro4/socketserver/__init__.py create mode 100644 APPS_UNCOMPILED/lib/Pyro4/socketserver/existingconnectionserver.py create mode 100644 APPS_UNCOMPILED/lib/Pyro4/socketserver/multiplexserver.py create mode 100644 APPS_UNCOMPILED/lib/Pyro4/socketserver/threadpool.py create mode 100644 APPS_UNCOMPILED/lib/Pyro4/socketserver/threadpoolserver.py create mode 100644 APPS_UNCOMPILED/lib/Pyro4/socketutil.py create mode 100644 APPS_UNCOMPILED/lib/Pyro4/test/__init__.py create mode 100644 APPS_UNCOMPILED/lib/Pyro4/test/echoserver.py create mode 100644 APPS_UNCOMPILED/lib/Pyro4/util.py create mode 100644 APPS_UNCOMPILED/lib/Pyro4/utils/__init__.py create mode 100644 APPS_UNCOMPILED/lib/Pyro4/utils/flame.py create mode 100644 APPS_UNCOMPILED/lib/Pyro4/utils/flameserver.py create mode 100644 APPS_UNCOMPILED/lib/Pyro4/utils/httpgateway.py create mode 100644 APPS_UNCOMPILED/lib/autologging.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/__init__.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/aio/__init__.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/aio/patch_documentation.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/__init__.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/async_adapter.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/asyncio_compat.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/auth/__init__.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/auth/connection_string.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/auth/sastoken.py create mode 100644 
APPS_UNCOMPILED/lib/azure/iot/device/common/auth/signing_mechanism.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/callable_weak_method.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/chainable_exception.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/evented_callback.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/handle_exceptions.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/http_transport.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/models/__init__.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/models/proxy_options.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/models/x509.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/mqtt_transport.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/__init__.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/config.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_events_base.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_events_mqtt.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_exceptions.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_ops_base.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_ops_http.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_ops_mqtt.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_stages_base.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_stages_http.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_stages_mqtt.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_thread.py create mode 100644 
APPS_UNCOMPILED/lib/azure/iot/device/common/transport_exceptions.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/common/version_compat.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/constant.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/exceptions.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/__init__.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/abstract_clients.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/aio/__init__.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/aio/async_clients.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/aio/async_handler_manager.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/aio/async_inbox.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/aio/loop_management.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/edge_hsm.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/inbox_manager.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/models/__init__.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/models/message.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/models/methods.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/models/twin.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/__init__.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/config.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/constant.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/exceptions.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/http_map_error.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/http_path_iothub.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/http_pipeline.py create mode 100644 
APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/mqtt_pipeline.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/mqtt_topic_iothub.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/pipeline_events_iothub.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/pipeline_ops_iothub.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/pipeline_ops_iothub_http.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/pipeline_stages_iothub.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/pipeline_stages_iothub_http.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/pipeline_stages_iothub_mqtt.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/sync_clients.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/sync_handler_manager.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/iothub/sync_inbox.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/patch.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/patch_documentation.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/provisioning/__init__.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/provisioning/abstract_provisioning_device_client.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/provisioning/aio/__init__.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/provisioning/aio/async_provisioning_device_client.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/provisioning/models/__init__.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/provisioning/models/registration_result.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/__init__.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/config.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/constant.py create mode 
100644 APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/exceptions.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/mqtt_pipeline.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/mqtt_topic_provisioning.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/pipeline_ops_provisioning.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/pipeline_stages_provisioning.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/pipeline_stages_provisioning_mqtt.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/provisioning/provisioning_device_client.py create mode 100644 APPS_UNCOMPILED/lib/azure/iot/device/user_agent.py create mode 100644 APPS_UNCOMPILED/lib/chardet/__init__.py create mode 100644 APPS_UNCOMPILED/lib/chardet/big5freq.py create mode 100644 APPS_UNCOMPILED/lib/chardet/big5prober.py create mode 100644 APPS_UNCOMPILED/lib/chardet/chardistribution.py create mode 100644 APPS_UNCOMPILED/lib/chardet/charsetgroupprober.py create mode 100644 APPS_UNCOMPILED/lib/chardet/charsetprober.py create mode 100644 APPS_UNCOMPILED/lib/chardet/cli/__init__.py create mode 100644 APPS_UNCOMPILED/lib/chardet/cli/chardetect.py create mode 100644 APPS_UNCOMPILED/lib/chardet/codingstatemachine.py create mode 100644 APPS_UNCOMPILED/lib/chardet/compat.py create mode 100644 APPS_UNCOMPILED/lib/chardet/cp949prober.py create mode 100644 APPS_UNCOMPILED/lib/chardet/enums.py create mode 100644 APPS_UNCOMPILED/lib/chardet/escprober.py create mode 100644 APPS_UNCOMPILED/lib/chardet/escsm.py create mode 100644 APPS_UNCOMPILED/lib/chardet/eucjpprober.py create mode 100644 APPS_UNCOMPILED/lib/chardet/euckrfreq.py create mode 100644 APPS_UNCOMPILED/lib/chardet/euckrprober.py create mode 100644 APPS_UNCOMPILED/lib/chardet/euctwfreq.py create mode 100644 APPS_UNCOMPILED/lib/chardet/euctwprober.py create mode 100644 
APPS_UNCOMPILED/lib/chardet/gb2312freq.py create mode 100644 APPS_UNCOMPILED/lib/chardet/gb2312prober.py create mode 100644 APPS_UNCOMPILED/lib/chardet/hebrewprober.py create mode 100644 APPS_UNCOMPILED/lib/chardet/jisfreq.py create mode 100644 APPS_UNCOMPILED/lib/chardet/jpcntx.py create mode 100644 APPS_UNCOMPILED/lib/chardet/langbulgarianmodel.py create mode 100644 APPS_UNCOMPILED/lib/chardet/langgreekmodel.py create mode 100644 APPS_UNCOMPILED/lib/chardet/langhebrewmodel.py create mode 100644 APPS_UNCOMPILED/lib/chardet/langhungarianmodel.py create mode 100644 APPS_UNCOMPILED/lib/chardet/langrussianmodel.py create mode 100644 APPS_UNCOMPILED/lib/chardet/langthaimodel.py create mode 100644 APPS_UNCOMPILED/lib/chardet/langturkishmodel.py create mode 100644 APPS_UNCOMPILED/lib/chardet/latin1prober.py create mode 100644 APPS_UNCOMPILED/lib/chardet/mbcharsetprober.py create mode 100644 APPS_UNCOMPILED/lib/chardet/mbcsgroupprober.py create mode 100644 APPS_UNCOMPILED/lib/chardet/mbcssm.py create mode 100644 APPS_UNCOMPILED/lib/chardet/metadata/__init__.py create mode 100644 APPS_UNCOMPILED/lib/chardet/metadata/languages.py create mode 100644 APPS_UNCOMPILED/lib/chardet/sbcharsetprober.py create mode 100644 APPS_UNCOMPILED/lib/chardet/sbcsgroupprober.py create mode 100644 APPS_UNCOMPILED/lib/chardet/sjisprober.py create mode 100644 APPS_UNCOMPILED/lib/chardet/universaldetector.py create mode 100644 APPS_UNCOMPILED/lib/chardet/utf8prober.py create mode 100644 APPS_UNCOMPILED/lib/chardet/version.py create mode 100644 APPS_UNCOMPILED/lib/deprecation.py create mode 100644 APPS_UNCOMPILED/lib/h2/__init__.py create mode 100644 APPS_UNCOMPILED/lib/h2/config.py create mode 100644 APPS_UNCOMPILED/lib/h2/connection.py create mode 100644 APPS_UNCOMPILED/lib/h2/errors.py create mode 100644 APPS_UNCOMPILED/lib/h2/events.py create mode 100644 APPS_UNCOMPILED/lib/h2/exceptions.py create mode 100644 APPS_UNCOMPILED/lib/h2/frame_buffer.py create mode 100644 
APPS_UNCOMPILED/lib/h2/settings.py create mode 100644 APPS_UNCOMPILED/lib/h2/stream.py create mode 100644 APPS_UNCOMPILED/lib/h2/utilities.py create mode 100644 APPS_UNCOMPILED/lib/h2/windows.py create mode 100644 APPS_UNCOMPILED/lib/hpack/__init__.py create mode 100644 APPS_UNCOMPILED/lib/hpack/compat.py create mode 100644 APPS_UNCOMPILED/lib/hpack/exceptions.py create mode 100644 APPS_UNCOMPILED/lib/hpack/hpack.py create mode 100644 APPS_UNCOMPILED/lib/hpack/hpack_compat.py create mode 100644 APPS_UNCOMPILED/lib/hpack/huffman.py create mode 100644 APPS_UNCOMPILED/lib/hpack/huffman_constants.py create mode 100644 APPS_UNCOMPILED/lib/hpack/huffman_table.py create mode 100644 APPS_UNCOMPILED/lib/hpack/struct.py create mode 100644 APPS_UNCOMPILED/lib/hpack/table.py create mode 100644 APPS_UNCOMPILED/lib/jsonpath.py create mode 100644 APPS_UNCOMPILED/lib/jwt/__init__.py create mode 100644 APPS_UNCOMPILED/lib/jwt/__main__.py create mode 100644 APPS_UNCOMPILED/lib/jwt/algorithms.py create mode 100644 APPS_UNCOMPILED/lib/jwt/api_jws.py create mode 100644 APPS_UNCOMPILED/lib/jwt/api_jwt.py create mode 100644 APPS_UNCOMPILED/lib/jwt/compat.py create mode 100644 APPS_UNCOMPILED/lib/jwt/contrib/__init__.py create mode 100644 APPS_UNCOMPILED/lib/jwt/contrib/algorithms/__init__.py create mode 100644 APPS_UNCOMPILED/lib/jwt/contrib/algorithms/py_ecdsa.py create mode 100644 APPS_UNCOMPILED/lib/jwt/contrib/algorithms/pycrypto.py create mode 100644 APPS_UNCOMPILED/lib/jwt/exceptions.py create mode 100644 APPS_UNCOMPILED/lib/jwt/help.py create mode 100644 APPS_UNCOMPILED/lib/jwt/utils.py create mode 100644 APPS_UNCOMPILED/lib/libevent.py create mode 100644 APPS_UNCOMPILED/lib/linkkit/__init__.py create mode 100644 APPS_UNCOMPILED/lib/linkkit/h2client.py create mode 100644 APPS_UNCOMPILED/lib/linkkit/linkkit.py create mode 100644 APPS_UNCOMPILED/lib/linksdktest/__init__.py create mode 100644 APPS_UNCOMPILED/lib/linksdktest/common/__init__.py create mode 100644 
APPS_UNCOMPILED/lib/linksdktest/common/method.py create mode 100644 APPS_UNCOMPILED/lib/linksdktest/common/var.py create mode 100644 APPS_UNCOMPILED/lib/mcprotocol/__init__.py create mode 100644 APPS_UNCOMPILED/lib/mcprotocol/comfx.py create mode 100644 APPS_UNCOMPILED/lib/mcprotocol/mcconst.py create mode 100644 APPS_UNCOMPILED/lib/mcprotocol/mcerror.py create mode 100644 APPS_UNCOMPILED/lib/mcprotocol/mcformat.py create mode 100644 APPS_UNCOMPILED/lib/mcprotocol/type1c.py create mode 100644 APPS_UNCOMPILED/lib/mcprotocol/type3c.py create mode 100644 APPS_UNCOMPILED/lib/mcprotocol/type3e.py create mode 100644 APPS_UNCOMPILED/lib/mcprotocol/type4c.py create mode 100644 APPS_UNCOMPILED/lib/mcprotocol/type4e.py create mode 100644 APPS_UNCOMPILED/lib/opcua/__init__.py create mode 100644 APPS_UNCOMPILED/lib/opcua/compat.py create mode 100644 APPS_UNCOMPILED/lib/opcua/crypto/__init__.py create mode 100644 APPS_UNCOMPILED/lib/opcua/crypto/security_policies.py create mode 100644 APPS_UNCOMPILED/lib/opcua/crypto/uacrypto.py create mode 100644 APPS_UNCOMPILED/lib/opcua/tools.py create mode 100644 APPS_UNCOMPILED/lib/opcua/ua/attribute_ids.py create mode 100644 APPS_UNCOMPILED/lib/opcua/ua/object_ids.py create mode 100644 APPS_UNCOMPILED/lib/opcua/ua/uaprotocol_hand.py create mode 100644 APPS_UNCOMPILED/lib/packaging/__about__.py create mode 100644 APPS_UNCOMPILED/lib/packaging/__init__.py create mode 100644 APPS_UNCOMPILED/lib/packaging/_compat.py create mode 100644 APPS_UNCOMPILED/lib/packaging/_structures.py create mode 100644 APPS_UNCOMPILED/lib/packaging/_typing.py create mode 100644 APPS_UNCOMPILED/lib/packaging/markers.py create mode 100644 APPS_UNCOMPILED/lib/packaging/requirements.py create mode 100644 APPS_UNCOMPILED/lib/packaging/specifiers.py create mode 100644 APPS_UNCOMPILED/lib/packaging/tags.py create mode 100644 APPS_UNCOMPILED/lib/packaging/utils.py create mode 100644 APPS_UNCOMPILED/lib/packaging/version.py create mode 100644 
APPS_UNCOMPILED/lib/paho/__init__.py create mode 100644 APPS_UNCOMPILED/lib/paho/mqtt/__init__.py create mode 100644 APPS_UNCOMPILED/lib/paho/mqtt/client.py create mode 100644 APPS_UNCOMPILED/lib/paho/mqtt/matcher.py create mode 100644 APPS_UNCOMPILED/lib/paho/mqtt/packettypes.py create mode 100644 APPS_UNCOMPILED/lib/paho/mqtt/properties.py create mode 100644 APPS_UNCOMPILED/lib/paho/mqtt/publish.py create mode 100644 APPS_UNCOMPILED/lib/paho/mqtt/reasoncodes.py create mode 100644 APPS_UNCOMPILED/lib/paho/mqtt/subscribe.py create mode 100644 APPS_UNCOMPILED/lib/paho/mqtt/subscribeoptions.py create mode 100644 APPS_UNCOMPILED/lib/pycomm3/__init__.py create mode 100644 APPS_UNCOMPILED/lib/pycomm3/_version.py create mode 100644 APPS_UNCOMPILED/lib/pycomm3/bytes_.py create mode 100644 APPS_UNCOMPILED/lib/pycomm3/cip_base.py create mode 100644 APPS_UNCOMPILED/lib/pycomm3/clx.py create mode 100644 APPS_UNCOMPILED/lib/pycomm3/clx_legacy.py create mode 100644 APPS_UNCOMPILED/lib/pycomm3/com_server.py create mode 100644 APPS_UNCOMPILED/lib/pycomm3/const.py create mode 100644 APPS_UNCOMPILED/lib/pycomm3/exceptions.py create mode 100644 APPS_UNCOMPILED/lib/pycomm3/map.py create mode 100644 APPS_UNCOMPILED/lib/pycomm3/packets/__init__.py create mode 100644 APPS_UNCOMPILED/lib/pycomm3/packets/requests.py create mode 100644 APPS_UNCOMPILED/lib/pycomm3/packets/responses.py create mode 100644 APPS_UNCOMPILED/lib/pycomm3/slc.py create mode 100644 APPS_UNCOMPILED/lib/pycomm3/socket_.py create mode 100644 APPS_UNCOMPILED/lib/pycomm3/tag.py create mode 100644 APPS_UNCOMPILED/lib/pyparsing.py create mode 100644 APPS_UNCOMPILED/lib/pyrsistent/__init__.py create mode 100644 APPS_UNCOMPILED/lib/pyrsistent/_checked_types.py create mode 100644 APPS_UNCOMPILED/lib/pyrsistent/_compat.py create mode 100644 APPS_UNCOMPILED/lib/pyrsistent/_field_common.py create mode 100644 APPS_UNCOMPILED/lib/pyrsistent/_helpers.py create mode 100644 APPS_UNCOMPILED/lib/pyrsistent/_immutable.py create mode 
100644 APPS_UNCOMPILED/lib/pyrsistent/_pbag.py create mode 100644 APPS_UNCOMPILED/lib/pyrsistent/_pclass.py create mode 100644 APPS_UNCOMPILED/lib/pyrsistent/_pdeque.py create mode 100644 APPS_UNCOMPILED/lib/pyrsistent/_plist.py create mode 100644 APPS_UNCOMPILED/lib/pyrsistent/_pmap.py create mode 100644 APPS_UNCOMPILED/lib/pyrsistent/_precord.py create mode 100644 APPS_UNCOMPILED/lib/pyrsistent/_pset.py create mode 100644 APPS_UNCOMPILED/lib/pyrsistent/_pvector.py create mode 100644 APPS_UNCOMPILED/lib/pyrsistent/_toolz.py create mode 100644 APPS_UNCOMPILED/lib/pyrsistent/_transformations.py create mode 100644 APPS_UNCOMPILED/lib/pyrsistent/typing.py create mode 100644 APPS_UNCOMPILED/lib/serpent.py create mode 100644 APPS_UNCOMPILED/lib/sftpFunc.py create mode 100644 APPS_UNCOMPILED/lib/six.py create mode 100644 APPS_UNCOMPILED/lib/snap7/__init__.py create mode 100644 APPS_UNCOMPILED/lib/snap7/bin/__init__.py create mode 100644 APPS_UNCOMPILED/lib/snap7/bin/snap7-server.py create mode 100644 APPS_UNCOMPILED/lib/snap7/client.py create mode 100644 APPS_UNCOMPILED/lib/snap7/common.py create mode 100644 APPS_UNCOMPILED/lib/snap7/error.py create mode 100644 APPS_UNCOMPILED/lib/snap7/logo.py create mode 100644 APPS_UNCOMPILED/lib/snap7/partner.py create mode 100644 APPS_UNCOMPILED/lib/snap7/server.py create mode 100644 APPS_UNCOMPILED/lib/snap7/six.py create mode 100644 APPS_UNCOMPILED/lib/snap7/snap7exceptions.py create mode 100644 APPS_UNCOMPILED/lib/snap7/snap7types.py create mode 100644 APPS_UNCOMPILED/lib/snap7/util.py create mode 100644 APPS_UNCOMPILED/lib/socks.py create mode 100644 APPS_UNCOMPILED/lib/sockshandler.py create mode 100644 APPS_UNCOMPILED/lib/sparkPlugB_pb2.py create mode 100644 APPS_UNCOMPILED/lib/timeseries/__init__.py create mode 100644 APPS_UNCOMPILED/lib/timeseries/clients/__init__.py create mode 100644 APPS_UNCOMPILED/lib/timeseries/clients/time_series_client.py create mode 100644 APPS_UNCOMPILED/lib/timeseries/models/__init__.py create mode 
100644 APPS_UNCOMPILED/lib/timeseries/models/badrequest.py create mode 100644 APPS_UNCOMPILED/lib/timeseries/models/delete_timeseries_request.py create mode 100644 APPS_UNCOMPILED/lib/timeseries/models/error.py create mode 100644 APPS_UNCOMPILED/lib/timeseries/models/get_timeseries_request.py create mode 100644 APPS_UNCOMPILED/lib/timeseries/models/notfound.py create mode 100644 APPS_UNCOMPILED/lib/timeseries/models/put_timeseries_request.py create mode 100644 APPS_UNCOMPILED/lib/timeseries/models/timeseries.py create mode 100644 APPS_UNCOMPILED/lib/timeseries/models/toomanyrequests.py create mode 100644 APPS_UNCOMPILED/lib/timeseries/models/unauthorized.py create mode 100644 APPS_UNCOMPILED/lib/typing_extensions.py create mode 100644 APPS_UNCOMPILED/lib/zipp.py create mode 100644 APPS_UNCOMPILED/src/__init__.py create mode 100644 APPS_UNCOMPILED/src/adapter/__init__.py create mode 100644 APPS_UNCOMPILED/src/adapter/opcua_server.py create mode 100644 APPS_UNCOMPILED/src/common/AlarmPolicy.py create mode 100644 APPS_UNCOMPILED/src/common/CloudType.py create mode 100644 APPS_UNCOMPILED/src/common/ConfigMisc.py create mode 100644 APPS_UNCOMPILED/src/common/ConfigParser.py create mode 100644 APPS_UNCOMPILED/src/common/Constant.py create mode 100644 APPS_UNCOMPILED/src/common/Controller.py create mode 100644 APPS_UNCOMPILED/src/common/DataType.py create mode 100644 APPS_UNCOMPILED/src/common/ErlangAPI.py create mode 100644 APPS_UNCOMPILED/src/common/Error.py create mode 100644 APPS_UNCOMPILED/src/common/InDB.py create mode 100644 APPS_UNCOMPILED/src/common/InternalPath.py create mode 100644 APPS_UNCOMPILED/src/common/InternalTopic.py create mode 100644 APPS_UNCOMPILED/src/common/LocalSetting.py create mode 100644 APPS_UNCOMPILED/src/common/Logger.py create mode 100644 APPS_UNCOMPILED/src/common/MQClient.py create mode 100644 APPS_UNCOMPILED/src/common/MeasureGroup.py create mode 100644 APPS_UNCOMPILED/src/common/MeasurePoint.py create mode 100644 
APPS_UNCOMPILED/src/common/MobiusAPI.py create mode 100644 APPS_UNCOMPILED/src/common/Permission.py create mode 100644 APPS_UNCOMPILED/src/common/Protocol.py create mode 100644 APPS_UNCOMPILED/src/common/Renjie.py create mode 100644 APPS_UNCOMPILED/src/common/SendType.py create mode 100644 APPS_UNCOMPILED/src/common/ServiceID.py create mode 100644 APPS_UNCOMPILED/src/common/SouthConfig.py create mode 100644 APPS_UNCOMPILED/src/common/TriggerType.py create mode 100644 APPS_UNCOMPILED/src/common/Utilities.py create mode 100644 APPS_UNCOMPILED/src/common/__init__.py create mode 100644 APPS_UNCOMPILED/src/common/__pycache__/InternalTopic.cpython-312.pyc create mode 100644 APPS_UNCOMPILED/src/common/__pycache__/Logger.cpython-312.pyc create mode 100644 APPS_UNCOMPILED/src/common/__pycache__/__init__.cpython-312.pyc create mode 100644 APPS_UNCOMPILED/src/drivers/Drivers.py create mode 100644 APPS_UNCOMPILED/src/drivers/EnIPDriver.py create mode 100644 APPS_UNCOMPILED/src/drivers/MCDriver.py create mode 100644 APPS_UNCOMPILED/src/drivers/OpcDaDriver.py create mode 100644 APPS_UNCOMPILED/src/drivers/OpcUaDriver.py create mode 100644 APPS_UNCOMPILED/src/drivers/S7Driver.py create mode 100644 APPS_UNCOMPILED/src/drivers/__init__.py create mode 100644 APPS_UNCOMPILED/src/drvr.py create mode 100644 APPS_UNCOMPILED/src/main.py create mode 100644 APPS_UNCOMPILED/src/master/AdaptConfig.py create mode 100644 APPS_UNCOMPILED/src/master/Config.py create mode 100644 APPS_UNCOMPILED/src/master/ConfigMerge.py create mode 100644 APPS_UNCOMPILED/src/master/ConfigSchema.py create mode 100644 APPS_UNCOMPILED/src/master/DriverTls.py create mode 100644 APPS_UNCOMPILED/src/master/Erlang.py create mode 100644 APPS_UNCOMPILED/src/master/Master.py create mode 100644 APPS_UNCOMPILED/src/master/Services.py create mode 100644 APPS_UNCOMPILED/src/master/Web.py create mode 100644 APPS_UNCOMPILED/src/master/__init__.py create mode 100644 APPS_UNCOMPILED/src/mindsphere.py create mode 100644 
APPS_UNCOMPILED/src/mindsphere/__init__.py create mode 100644 APPS_UNCOMPILED/src/mindsphere/mindspherePut.py create mode 100644 APPS_UNCOMPILED/src/mindsphere/mindsphere_function.py create mode 100644 APPS_UNCOMPILED/src/opcuasvr.py create mode 100644 APPS_UNCOMPILED/src/qckfs.py create mode 100644 APPS_UNCOMPILED/src/quickfaas/LWTSDB.py create mode 100644 APPS_UNCOMPILED/src/quickfaas/LwTimer.py create mode 100644 APPS_UNCOMPILED/src/quickfaas/OpcuaMethod.py create mode 100644 APPS_UNCOMPILED/src/quickfaas/__init__.py create mode 100644 APPS_UNCOMPILED/src/quickfaas/__pycache__/__init__.cpython-312.pyc create mode 100644 APPS_UNCOMPILED/src/quickfaas/__pycache__/config.cpython-312.pyc create mode 100644 APPS_UNCOMPILED/src/quickfaas/__pycache__/controller.cpython-312.pyc create mode 100644 APPS_UNCOMPILED/src/quickfaas/__pycache__/quick_function.cpython-312.pyc create mode 100644 APPS_UNCOMPILED/src/quickfaas/__pycache__/service_id.cpython-312.pyc create mode 100644 APPS_UNCOMPILED/src/quickfaas/__pycache__/transport.cpython-312.pyc create mode 100644 APPS_UNCOMPILED/src/quickfaas/aliyuniot.py create mode 100644 APPS_UNCOMPILED/src/quickfaas/awsiot.py create mode 100644 APPS_UNCOMPILED/src/quickfaas/azureiot.py create mode 100644 APPS_UNCOMPILED/src/quickfaas/clouds.py create mode 100644 APPS_UNCOMPILED/src/quickfaas/config.py create mode 100644 APPS_UNCOMPILED/src/quickfaas/controller.py create mode 100644 APPS_UNCOMPILED/src/quickfaas/ds1_wizard.py create mode 100644 APPS_UNCOMPILED/src/quickfaas/faas_handler.py create mode 100644 APPS_UNCOMPILED/src/quickfaas/file.py create mode 100644 APPS_UNCOMPILED/src/quickfaas/global_dict.py create mode 100644 APPS_UNCOMPILED/src/quickfaas/measure.py create mode 100644 APPS_UNCOMPILED/src/quickfaas/messagebus.py create mode 100644 APPS_UNCOMPILED/src/quickfaas/mqttSparkPlugB.py create mode 100644 APPS_UNCOMPILED/src/quickfaas/packet_handler.py create mode 100644 APPS_UNCOMPILED/src/quickfaas/quick_function.py create mode 
100644 APPS_UNCOMPILED/src/quickfaas/remotebus.py create mode 100644 APPS_UNCOMPILED/src/quickfaas/service_id.py create mode 100644 APPS_UNCOMPILED/src/quickfaas/transport.py create mode 100644 Pub_Sub/ba_facility/thingsboard/pub/sendData.py create mode 100644 Pub_Sub/ba_facility/thingsboard/sub/receiveCommands.py create mode 100644 Pub_Sub/ba_facility/thingsboard/tags.csv rename Pub_Sub/rr_facility/thingsboard/{rr_facility.csv => rr_facility_measures.csv} (99%) diff --git a/APPS_UNCOMPILED/lib/OpenOPC.py b/APPS_UNCOMPILED/lib/OpenOPC.py new file mode 100644 index 0000000..e9edc99 --- /dev/null +++ b/APPS_UNCOMPILED/lib/OpenOPC.py @@ -0,0 +1,1157 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/OpenOPC.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 42815 bytes +import os, sys, time, types, string, socket, re, Pyro4.core +from multiprocessing import Queue +__version__ = "1.2.0" +current_client = None +if os.name == "nt": + try: + import win32com.client, win32com.server.util, win32event, pythoncom, pywintypes, SystemHealth + pywintypes.datetime = pywintypes.TimeType + vt = dict([(pythoncom.__dict__[vtype], vtype) for vtype in pythoncom.__dict__.keys() if vtype[None[:2]] == "VT"]) + win32com.client.gencache.is_readonly = False + win32com.client.gencache.Rebuild(verbose=0) + except ImportError: + win32com_found = False + else: + win32com_found = True +else: + win32com_found = False +SOURCE_CACHE = 1 +SOURCE_DEVICE = 2 +OPC_STATUS = (0, 'Running', 'Failed', 'NoConfig', 'Suspended', 'Test') +BROWSER_TYPE = (0, 'Hierarchical', 'Flat') +ACCESS_RIGHTS = (0, 'Read', 'Write', 'Read/Write') +OPC_QUALITY = ('Bad', 'Uncertain', 'Unknown', 'Good') +OPC_CLASS = "Matrikon.OPC.Automation;Graybox.OPC.DAWrapper;HSCOPC.Automation;RSI.OPCAutomation;OPC.Automation" +OPC_SERVER 
= "Hci.TPNServer;HwHsc.OPCServer;opc.deltav.1;AIM.OPC.1;Yokogawa.ExaopcDAEXQ.1;OSI.DA.1;OPC.PHDServerDA.1;Aspen.Infoplus21_DA.1;National Instruments.OPCLabVIEW;RSLinx OPC Server;KEPware.KEPServerEx.V4;Matrikon.OPC.Simulation;Prosys.OPC.Simulation;CCOPC.XMLWrapper.1;OPC.SimaticHMI.CoRtHmiRTm.1" +OPC_CLIENT = "OpenOPC" + +def quality_str(quality_bits): + """Convert OPC quality bits to a descriptive string""" + quality = quality_bits >> 6 & 3 + return OPC_QUALITY[quality] + + +def type_check(tags): + """Perform a type check on a list of tags""" + if type(tags) in (list, tuple): + single = False + else: + if tags == None: + tags = [] + single = False + else: + tags = [ + tags] + single = True + if len([t for t in tags if type(t) not in (str, bytes)]) == 0: + valid = True + else: + valid = False + return (tags, single, valid) + + +def wild2regex(string): + """Convert a Unix wildcard glob into a regular expression""" + return string.replace(".", "\\.").replace("*", ".*").replace("?", ".").replace("!", "^") + + +def tags2trace(tags): + """Convert a list tags into a formatted string suitable for the trace callback log""" + arg_str = "" + for i, t in enumerate(tags[1[:None]]): + if i > 0: + arg_str += "," + arg_str += "%s" % t + + return arg_str + + +def exceptional(func, alt_return=None, alt_exceptions=(Exception,), final=None, catch=None): + """Turns exceptions into an alternative return value""" + + def _exceptional(*args, **kwargs): + try: + try: + return func(*args, **kwargs) + except alt_exceptions: + return alt_return + except: + if catch: + return catch(sys.exc_info(), (lambda: func(*args, **kwargs))) + raise + + finally: + if final: + final() + + return _exceptional + + +def get_sessions(host='localhost', port=7766): + """Return sessions in OpenOPC Gateway Service as GUID:host hash""" + import Pyro4.core + server_obj = Pyro4.Proxy("PYRO:opc@{0}:{1}".format(host, port)) + return server_obj.get_clients() + + +def open_client(host='localhost', port=7766, 
debug_log=None): + """Connect to the specified OpenOPC Gateway Service""" + import Pyro4.core + server_obj = Pyro4.Proxy("PYRO:opc@{0}:{1}".format(host, port)) + server_obj.opcda_set_debug_log(debug_log) + return server_obj.create_client() + + +class TimeoutError(Exception): + + def __init__(self, txt): + Exception.__init__(self, txt) + + +class OPCError(Exception): + + def __init__(self, txt): + Exception.__init__(self, txt) + + +class GroupEvents: + + def __init__(self): + global current_client + self.client = current_client + + def OnDataChange(self, TransactionID, NumItems, ClientHandles, ItemValues, Qualities, TimeStamps): + self.client.callback_queue.put((TransactionID, ClientHandles, ItemValues, Qualities, TimeStamps)) + + +@Pyro4.expose +class client: + + def __init__(self, opc_class=None, client_name=None): + """Instantiate OPC automation class""" + self.callback_queue = Queue() + pythoncom.CoInitialize() + if opc_class == None: + if "OPC_CLASS" in os.environ: + opc_class = os.environ["OPC_CLASS"] + else: + opc_class = OPC_CLASS + opc_class_list = opc_class.split(";") + for i, c in enumerate(opc_class_list): + try: + self._opc = win32com.client.gencache.EnsureDispatch(c, 0) + self.opc_class = c + break + except pythoncom.com_error as err: + try: + if i == len(opc_class_list) - 1: + error_msg = "Dispatch: %s" % self._get_error_str(err) + raise OPCError(error_msg) + finally: + err = None + del err + + self._event = win32event.CreateEvent(None, 0, 0, None) + self.opc_server = None + self.opc_host = None + self.client_name = client_name + self._groups = {} + self._group_tags = {} + self._group_valid_tags = {} + self._group_server_handles = {} + self._group_handles_tag = {} + self._group_hooks = {} + self._open_serv = None + self._open_self = None + self._open_host = None + self._open_port = None + self._open_guid = None + self._prev_serv_time = None + self._tx_id = 0 + self.trace = None + self.cpu = None + + def set_trace(self, trace): + if self._open_serv == 
None: + self.trace = trace + + def connect(self, opc_server=None, opc_host='localhost'): + """Connect to the specified OPC server""" + pythoncom.CoInitialize() + if opc_server == None: + if self.opc_server == None: + if "OPC_SERVER" in os.environ: + opc_server = os.environ["OPC_SERVER"] + else: + opc_server = OPC_SERVER + else: + opc_server = self.opc_server + opc_host = self.opc_host + opc_server_list = opc_server.split(";") + connected = False + for s in opc_server_list: + try: + if self.trace: + self.trace("Connect(%s,%s)" % (s, opc_host)) + self._opc.Connect(s, opc_host) + except pythoncom.com_error as err: + try: + if len(opc_server_list) == 1: + error_msg = "Connect: %s" % self._get_error_str(err) + raise OPCError(error_msg) + finally: + err = None + del err + + else: + try: + if self.client_name == None: + if "OPC_CLIENT" in os.environ: + self._opc.ClientName = os.environ["OPC_CLIENT"] + else: + self._opc.ClientName = OPC_CLIENT + else: + self._opc.ClientName = self.client_name + except: + pass + + connected = True + break + + if not connected: + raise OPCError("Connect: Cannot connect to any of the servers in the OPC_SERVER list") + time.sleep(0.01) + self.opc_server = opc_server + if opc_host == "localhost": + opc_host = socket.gethostname() + self.opc_host = opc_host + self._groups = {} + self._group_tags = {} + self._group_valid_tags = {} + self._group_server_handles = {} + self._group_handles_tag = {} + self._group_hooks = {} + + def GUID(self): + return self._open_guid + + def close(self, del_object=True): + """Disconnect from the currently connected OPC server""" + try: + try: + pythoncom.CoInitialize() + self.remove(self.groups()) + except pythoncom.com_error as err: + try: + error_msg = "Disconnect: %s" % self._get_error_str(err) + raise OPCError(error_msg) + finally: + err = None + del err + + except OPCError: + pass + + finally: + if self.trace: + self.trace("Disconnect()") + self._opc.Disconnect() + if self._open_serv: + if del_object: + 
self._open_serv.release_client(self._open_self) + + def iread(self, tags=None, group=None, size=None, pause=0, source='hybrid', update=-1, timeout=5000, sync=False, include_error=False, rebuild=False): + """Iterable version of read()""" + global current_client + + def add_items(tags): + names = list(tags) + names.insert(0, 0) + errors = [] + if self.trace: + self.trace("Validate(%s)" % tags2trace(names)) + else: + try: + errors = opc_items.Validate(len(names) - 1, names) + except: + pass + + valid_tags = [] + valid_values = [] + client_handles = [] + if sub_group not in self._group_handles_tag: + self._group_handles_tag[sub_group] = {} + n = 0 + else: + if len(self._group_handles_tag[sub_group]) > 0: + n = max(self._group_handles_tag[sub_group]) + 1 + else: + n = 0 + for i, tag in enumerate(tags): + if errors[i] == 0: + valid_tags.append(tag) + client_handles.append(n) + self._group_handles_tag[sub_group][n] = tag + n += 1 + else: + if include_error: + error_msgs[tag] = self._opc.GetErrorString(errors[i]) + if self.trace and errors[i] != 0: + self.trace("%s failed validation" % tag) + + client_handles.insert(0, 0) + valid_tags.insert(0, 0) + server_handles = [] + errors = [] + if self.trace: + self.trace("AddItems(%s)" % tags2trace(valid_tags)) + try: + server_handles, errors = opc_items.AddItems(len(client_handles) - 1, valid_tags, client_handles) + except: + pass + + valid_tags_tmp = [] + server_handles_tmp = [] + valid_tags.pop(0) + if sub_group not in self._group_server_handles: + self._group_server_handles[sub_group] = {} + for i, tag in enumerate(valid_tags): + if errors[i] == 0: + valid_tags_tmp.append(tag) + server_handles_tmp.append(server_handles[i]) + self._group_server_handles[sub_group][tag] = server_handles[i] + + valid_tags = valid_tags_tmp + server_handles = server_handles_tmp + return ( + valid_tags, server_handles) + + def remove_items(tags): + if self.trace: + self.trace("RemoveItems(%s)" % tags2trace([""] + tags)) + server_handles = 
[self._group_server_handles[sub_group][tag] for tag in tags] + server_handles.insert(0, 0) + errors = [] + try: + errors = opc_items.Remove(len(server_handles) - 1, server_handles) + except pythoncom.com_error as err: + try: + error_msg = "RemoveItems: %s" % self._get_error_str(err) + raise OPCError(error_msg) + finally: + err = None + del err + + try: + self._update_tx_time() + pythoncom.CoInitialize() + if include_error: + sync = True + if sync: + update = -1 + tags, single, valid = type_check(tags) + if not valid: + raise TypeError("iread(): 'tags' parameter must be a string or a list of strings") + elif group in self._groups: + num_groups = rebuild or self._groups[group] + data_source = SOURCE_CACHE + else: + if size: + tag_groups = [tags[i[:i + size]] for i in range(0, len(tags), size)] + else: + tag_groups = [ + tags] + num_groups = len(tag_groups) + data_source = SOURCE_DEVICE + results = [] + for gid in range(num_groups): + if gid > 0: + if pause > 0: + time.sleep(pause / 1000.0) + else: + error_msgs = {} + opc_groups = self._opc.OPCGroups + opc_groups.DefaultGroupUpdateRate = update + if group == None: + try: + if self.trace: + self.trace("AddGroup()") + opc_group = opc_groups.Add() + except pythoncom.com_error as err: + try: + error_msg = "AddGroup: %s" % self._get_error_str(err) + raise OPCError(error_msg) + finally: + err = None + del err + + sub_group = group + new_group = True + else: + sub_group = "%s.%d" % (group, gid) + try: + if self.trace: + self.trace("GetOPCGroup(%s)" % sub_group) + opc_group = opc_groups.GetOPCGroup(sub_group) + new_group = False + except: + try: + if self.trace: + self.trace("AddGroup(%s)" % sub_group) + opc_group = opc_groups.Add(sub_group) + except pythoncom.com_error as err: + try: + error_msg = "AddGroup: %s" % self._get_error_str(err) + raise OPCError(error_msg) + finally: + err = None + del err + + self._groups[str(group)] = len(tag_groups) + new_group = True + + opc_items = opc_group.OPCItems + if new_group: + 
opc_group.IsSubscribed = 1 + opc_group.IsActive = 1 + if not sync: + if self.trace: + self.trace("WithEvents(%s)" % opc_group.Name) + current_client = self + self._group_hooks[opc_group.Name] = win32com.client.WithEvents(opc_group, GroupEvents) + tags = tag_groups[gid] + valid_tags, server_handles = add_items(tags) + self._group_tags[sub_group] = tags + self._group_valid_tags[sub_group] = valid_tags + else: + if rebuild: + tags = tag_groups[gid] + valid_tags = self._group_valid_tags[sub_group] + add_tags = [t for t in tags if t not in valid_tags] + del_tags = [t for t in valid_tags if t not in tags] + if len(add_tags) > 0: + valid_tags, server_handles = add_items(add_tags) + valid_tags = self._group_valid_tags[sub_group] + valid_tags + if len(del_tags) > 0: + remove_items(del_tags) + valid_tags = [t for t in valid_tags if t not in del_tags] + self._group_tags[sub_group] = tags + self._group_valid_tags[sub_group] = valid_tags + if source == "hybrid": + data_source = SOURCE_DEVICE + else: + tags = self._group_tags[sub_group] + valid_tags = self._group_valid_tags[sub_group] + if sync: + server_handles = [item.ServerHandle for item in opc_items] + tag_value = {} + tag_quality = {} + tag_time = {} + tag_error = {} + if sync: + values = [] + errors = [] + qualities = [] + timestamps = [] + if len(valid_tags) > 0: + server_handles.insert(0, 0) + if source != "hybrid": + data_source = SOURCE_CACHE if source == "cache" else SOURCE_DEVICE + if self.trace: + self.trace("SyncRead(%s)" % data_source) + try: + values, errors, qualities, timestamps = opc_group.SyncRead(data_source, len(server_handles) - 1, server_handles) + except pythoncom.com_error as err: + try: + error_msg = "SyncRead: %s" % self._get_error_str(err) + raise OPCError(error_msg) + finally: + err = None + del err + + for i, tag in enumerate(valid_tags): + tag_value[tag] = values[i] + tag_quality[tag] = qualities[i] + tag_time[tag] = timestamps[i] + tag_error[tag] = errors[i] + + elif len(valid_tags) > 0: + if 
self._tx_id >= 65535: + self._tx_id = 0 + else: + self._tx_id += 1 + if source != "hybrid": + data_source = SOURCE_CACHE if source == "cache" else SOURCE_DEVICE + if self.trace: + self.trace("AsyncRefresh(%s)" % data_source) + try: + opc_group.AsyncRefresh(data_source, self._tx_id) + except pythoncom.com_error as err: + try: + error_msg = "AsyncRefresh: %s" % self._get_error_str(err) + raise OPCError(error_msg) + finally: + err = None + del err + + tx_id = 0 + start = time.time() * 1000 + while tx_id != self._tx_id: + now = time.time() * 1000 + if now - start > timeout: + raise TimeoutError("Callback: Timeout waiting for data") + if self.callback_queue.empty(): + pythoncom.PumpWaitingMessages() + else: + tx_id, handles, values, qualities, timestamps = self.callback_queue.get() + + for i, h in enumerate(handles): + tag = self._group_handles_tag[sub_group][h] + tag_value[tag] = values[i] + tag_quality[tag] = qualities[i] + tag_time[tag] = timestamps[i] + + for tag in tags: + if tag in tag_value and not sync: + if not len(valid_tags) > 0 or sync or tag_error[tag] == 0: + value = tag_value[tag] + if type(value) == pywintypes.TimeType: + value = str(value) + quality = quality_str(tag_quality[tag]) + timestamp = str(tag_time[tag]) + else: + value = None + quality = "Error" + timestamp = None + if include_error: + error_msgs[tag] = self._opc.GetErrorString(tag_error[tag]).strip("\r\n") + else: + value = None + quality = "Error" + timestamp = None + if include_error: + if tag not in error_msgs: + error_msgs[tag] = "" + if single: + if include_error: + yield ( + value, quality, timestamp, error_msgs[tag]) + else: + yield ( + value, quality, timestamp) + else: + if include_error: + yield ( + tag, value, quality, timestamp, error_msgs[tag]) + yield ( + tag, value, quality, timestamp) + + if group == None: + try: + if not sync: + if opc_group.Name in self._group_hooks: + if self.trace: + self.trace("CloseEvents(%s)" % opc_group.Name) + self._group_hooks[opc_group.Name].close() 
+ if self.trace: + self.trace("RemoveGroup(%s)" % opc_group.Name) + opc_groups.Remove(opc_group.Name) + except pythoncom.com_error as err: + try: + error_msg = "RemoveGroup: %s" % self._get_error_str(err) + raise OPCError(error_msg) + finally: + err = None + del err + + except pythoncom.com_error as err: + try: + error_msg = "read: %s" % self._get_error_str(err) + raise OPCError(error_msg) + finally: + err = None + del err + + def read(self, tags=None, group=None, size=None, pause=0, source='hybrid', update=-1, timeout=5000, sync=False, include_error=False, rebuild=False): + """Return list of (value, quality, time) tuples for the specified tag(s)""" + tags_list, single, valid = type_check(tags) + if not valid: + raise TypeError("read(): 'tags' parameter must be a string or a list of strings") + else: + num_health_tags = len([t for t in tags_list if t[None[:1]] == "@"]) + num_opc_tags = len([t for t in tags_list if t[None[:1]] != "@"]) + if num_health_tags > 0: + if num_opc_tags > 0: + raise TypeError("read(): system health and OPC tags cannot be included in the same group") + results = self._read_health(tags) + else: + results = self.iread(tags, group, size, pause, source, update, timeout, sync, include_error, rebuild) + if single: + return list(results)[0] + return list(results) + + def _read_health(self, tags): + """Return values of special system health monitoring tags""" + self._update_tx_time() + tags, single, valid = type_check(tags) + time_str = time.strftime("%x %H:%M:%S") + results = [] + for t in tags: + if t == "@MemFree": + value = SystemHealth.mem_free() + else: + if t == "@MemUsed": + value = SystemHealth.mem_used() + else: + if t == "@MemTotal": + value = SystemHealth.mem_total() + else: + if t == "@MemPercent": + value = SystemHealth.mem_percent() + else: + if t == "@DiskFree": + value = SystemHealth.disk_free() + else: + if t == "@SineWave": + value = SystemHealth.sine_wave() + else: + if t == "@SawWave": + value = SystemHealth.saw_wave() + else: + 
if t == "@CpuUsage": + if self.cpu == None: + self.cpu = SystemHealth.CPU() + time.sleep(0.1) + value = self.cpu.get_usage() + else: + value = None + m = re.match("@TaskMem\\((.*?)\\)", t) + if m: + image_name = m.group(1) + value = SystemHealth.task_mem(image_name) + else: + m = re.match("@TaskCpu\\((.*?)\\)", t) + if m: + image_name = m.group(1) + value = SystemHealth.task_cpu(image_name) + else: + m = re.match("@TaskExists\\((.*?)\\)", t) + if m: + image_name = m.group(1) + value = SystemHealth.task_exists(image_name) + if value == None: + quality = "Error" + else: + quality = "Good" + if single: + results.append((value, quality, time_str)) + results.append((t, value, quality, time_str)) + + return results + + def iwrite(self, tag_value_pairs, size=None, pause=0, include_error=False): + """Iterable version of write()""" + try: + self._update_tx_time() + pythoncom.CoInitialize() + + def _valid_pair(p): + if type(p) in (list, tuple): + if len(p) >= 2: + if type(p[0]) in (str, bytes): + return True + return False + + if type(tag_value_pairs) not in (list, tuple): + raise TypeError("write(): 'tag_value_pairs' parameter must be a (tag, value) tuple or a list of (tag,value) tuples") + else: + if tag_value_pairs == None: + tag_value_pairs = [ + ""] + single = False + else: + if type(tag_value_pairs[0]) in (str, bytes): + tag_value_pairs = [ + tag_value_pairs] + single = True + else: + single = False + invalid_pairs = [p for p in tag_value_pairs if not _valid_pair(p)] + if len(invalid_pairs) > 0: + raise TypeError("write(): 'tag_value_pairs' parameter must be a (tag, value) tuple or a list of (tag,value) tuples") + names = [tag[0] for tag in tag_value_pairs] + tags = [tag[0] for tag in tag_value_pairs] + values = [tag[1] for tag in tag_value_pairs] + if size: + name_groups = [names[i[:i + size]] for i in range(0, len(names), size)] + tag_groups = [tags[i[:i + size]] for i in range(0, len(tags), size)] + value_groups = [values[i[:i + size]] for i in range(0, len(values), 
size)] + else: + name_groups = [ + names] + tag_groups = [ + tags] + value_groups = [values] + num_groups = len(tag_groups) + status = [] + for gid in range(num_groups): + if gid > 0: + if pause > 0: + time.sleep(pause / 1000.0) + opc_groups = self._opc.OPCGroups + opc_group = opc_groups.Add() + opc_items = opc_group.OPCItems + names = name_groups[gid] + tags = tag_groups[gid] + values = value_groups[gid] + names.insert(0, 0) + errors = [] + try: + errors = opc_items.Validate(len(names) - 1, names) + except: + pass + + n = 1 + valid_tags = [] + valid_values = [] + client_handles = [] + error_msgs = {} + for i, tag in enumerate(tags): + if errors[i] == 0: + valid_tags.append(tag) + valid_values.append(values[i]) + client_handles.append(n) + error_msgs[tag] = "" + n += 1 + + client_handles.insert(0, 0) + valid_tags.insert(0, 0) + server_handles = [] + errors = [] + try: + server_handles, errors = opc_items.AddItems(len(client_handles) - 1, valid_tags, client_handles) + except: + pass + + valid_tags_tmp = [] + valid_values_tmp = [] + server_handles_tmp = [] + valid_tags.pop(0) + for i, tag in enumerate(valid_tags): + if errors[i] == 0: + valid_tags_tmp.append(tag) + valid_values_tmp.append(valid_values[i]) + server_handles_tmp.append(server_handles[i]) + error_msgs[tag] = "" + + valid_tags = valid_tags_tmp + valid_values = valid_values_tmp + server_handles = server_handles_tmp + server_handles.insert(0, 0) + valid_values.insert(0, 0) + errors = [] + if len(valid_values) > 1: + try: + errors = opc_group.SyncWrite(len(server_handles) - 1, server_handles, valid_values) + except: + pass + + n = 0 + for tag in tags: + if tag in valid_tags: + if errors[n] == 0: + status = "Success" + else: + status = "Error" + if include_error: + error_msgs[tag] = self._opc.GetErrorString(errors[n]) + n += 1 + else: + status = "Error" + if include_error: + error_msgs[tag] = error_msgs[tag].strip("\r\n") + if single: + if include_error: + yield ( + status, error_msgs[tag]) + else: + yield 
status + elif include_error: + yield ( + tag, status, error_msgs[tag]) + else: + yield ( + tag, status) + + opc_groups.Remove(opc_group.Name) + + except pythoncom.com_error as err: + try: + error_msg = "write: %s" % self._get_error_str(err) + raise OPCError(error_msg) + finally: + err = None + del err + + def write(self, tag_value_pairs, size=None, pause=0, include_error=False): + """Write list of (tag, value) pair(s) to the server""" + if type(tag_value_pairs) in (list, tuple) and type(tag_value_pairs[0]) in (list, tuple): + single = False + else: + single = True + status = self.iwrite(tag_value_pairs, size, pause, include_error) + if single: + return list(status)[0] + return list(status) + + def groups(self): + """Return a list of active tag groups""" + return self._groups.keys() + + def remove(self, groups): + """Remove the specified tag group(s)""" + try: + pythoncom.CoInitialize() + opc_groups = self._opc.OPCGroups + if type(groups) in (str, bytes): + groups = [ + groups] + single = True + else: + single = False + status = [] + for group in groups: + if group in self._groups: + for i in range(self._groups[group]): + sub_group = "%s.%d" % (group, i) + if sub_group in self._group_hooks: + if self.trace: + self.trace("CloseEvents(%s)" % sub_group) + self._group_hooks[sub_group].close() + try: + if self.trace: + self.trace("RemoveGroup(%s)" % sub_group) + errors = opc_groups.Remove(sub_group) + except pythoncom.com_error as err: + try: + error_msg = "RemoveGroup: %s" % self._get_error_str(err) + raise OPCError(error_msg) + finally: + err = None + del err + + del self._group_tags[sub_group] + del self._group_valid_tags[sub_group] + del self._group_handles_tag[sub_group] + del self._group_server_handles[sub_group] + + del self._groups[group] + + except pythoncom.com_error as err: + try: + error_msg = "remove: %s" % self._get_error_str(err) + raise OPCError(error_msg) + finally: + err = None + del err + + def iproperties(self, tags, id=None): + """Iterable version of 
properties()""" + try: + self._update_tx_time() + pythoncom.CoInitialize() + tags, single_tag, valid = type_check(tags) + if not valid: + raise TypeError("properties(): 'tags' parameter must be a string or a list of strings") + try: + id.remove(0) + include_name = True + except: + include_name = False + + if id != None: + descriptions = [] + if isinstance(id, list) or isinstance(id, tuple): + property_id = list(id) + single_property = False + else: + property_id = [ + id] + single_property = True + for i in property_id: + descriptions.append("Property id %d" % i) + + else: + single_property = False + properties = [] + for tag in tags: + if id == None: + descriptions = [] + property_id = [] + count, property_id, descriptions, datatypes = self._opc.QueryAvailableProperties(tag) + tag_properties = list(map((lambda x, y: (x, y)), property_id, descriptions)) + property_id = [p for p, d in tag_properties if p > 0] + descriptions = [d for p, d in tag_properties if p > 0] + else: + property_id.insert(0, 0) + values = [] + errors = [] + values, errors = self._opc.GetItemProperties(tag, len(property_id) - 1, property_id) + property_id.pop(0) + values = [str(v) if type(v) == pywintypes.TimeType else v for v in values] + try: + i = property_id.index(1) + values[i] = vt[values[i]] + except: + pass + + try: + i = property_id.index(3) + values[i] = quality_str(values[i]) + except: + pass + + try: + i = property_id.index(5) + values[i] = ACCESS_RIGHTS[values[i]] + except: + pass + + if id != None: + if single_property: + if single_tag: + tag_properties = values + else: + tag_properties = [ + values] + else: + tag_properties = list(map((lambda x, y: (x, y)), property_id, values)) + else: + tag_properties = list(map((lambda x, y, z: (x, y, z)), property_id, descriptions, values)) + tag_properties.insert(0, (0, "Item ID (virtual property)", tag)) + if include_name: + tag_properties.insert(0, (0, tag)) + tag_properties = single_tag or [tuple([tag] + list(p)) for p in tag_properties] + 
for p in tag_properties: + yield p + + except pythoncom.com_error as err: + try: + error_msg = "properties: %s" % self._get_error_str(err) + raise OPCError(error_msg) + finally: + err = None + del err + + def properties(self, tags, id=None): + """Return list of property tuples (id, name, value) for the specified tag(s) """ + if type(tags) not in (list, tuple) and type(id) not in (type(None), list, tuple): + single = True + else: + single = False + props = self.iproperties(tags, id) + if single: + return list(props)[0] + return list(props) + + def ilist(self, paths='*', recursive=False, flat=False, include_type=False): + """Iterable version of list()""" + try: + self._update_tx_time() + pythoncom.CoInitialize() + try: + browser = self._opc.CreateBrowser() + except: + return + else: + paths, single, valid = type_check(paths) + if not valid: + raise TypeError("list(): 'paths' parameter must be a string or a list of strings") + if len(paths) == 0: + paths = [ + "*"] + nodes = {} + for path in paths: + if flat: + browser.MoveToRoot() + browser.Filter = "" + browser.ShowLeafs(True) + pattern = re.compile("^%s$" % wild2regex(path), re.IGNORECASE) + matches = filter(pattern.search, browser) + if include_type: + matches = [(x, node_type) for x in matches] + for node in matches: + yield node + + continue + queue = [] + queue.append(path) + while len(queue) > 0: + tag = queue.pop(0) + browser.MoveToRoot() + browser.Filter = "" + pattern = None + path_str = "/" + path_list = tag.replace(".", "/").split("/") + path_list = [p for p in path_list if len(p) > 0] + found_filter = False + path_postfix = "/" + for i, p in enumerate(path_list): + if found_filter: + path_postfix += p + "/" + else: + if p.find("*") >= 0: + pattern = re.compile("^%s$" % wild2regex(p), re.IGNORECASE) + found_filter = True + + browser.ShowBranches() + if len(browser) == 0: + browser.ShowLeafs(False) + lowest_level = True + node_type = "Leaf" + else: + lowest_level = False + node_type = "Branch" + matches = 
filter(pattern.search, browser) + if not lowest_level: + if recursive: + queue += [path_str + x + path_postfix for x in matches] + elif lowest_level: + matches = [exceptional(browser.GetItemID, x)(x) for x in matches] + if include_type: + matches = [(x, node_type) for x in matches] + for node in matches: + if node not in nodes: + yield node + nodes[node] = True + + except pythoncom.com_error as err: + try: + error_msg = "list: %s" % self._get_error_str(err) + raise OPCError(error_msg) + finally: + err = None + del err + + def list(self, paths='*', recursive=False, flat=False, include_type=False): + """Return list of item nodes at specified path(s) (tree browser)""" + nodes = self.ilist(paths, recursive, flat, include_type) + return list(nodes) + + def servers(self, opc_host='localhost'): + """Return list of available OPC servers""" + try: + pythoncom.CoInitialize() + servers = self._opc.GetOPCServers(opc_host) + servers = [s for s in servers if s != None] + return servers + except pythoncom.com_error as err: + try: + error_msg = "servers: %s" % self._get_error_str(err) + raise OPCError(error_msg) + finally: + err = None + del err + + def info(self): + """Return list of (name, value) pairs about the OPC server""" + try: + self._update_tx_time() + pythoncom.CoInitialize() + info_list = [] + if self._open_serv: + mode = "OpenOPC" + else: + mode = "DCOM" + info_list += [("Protocol", mode)] + if mode == "OpenOPC": + info_list += [("Gateway Host", "%s:%s" % (self._open_host, self._open_port))] + info_list += [("Gateway Version", "%s" % __version__)] + info_list += [("Class", self.opc_class)] + info_list += [("Client Name", self._opc.ClientName)] + info_list += [("OPC Host", self.opc_host)] + info_list += [("OPC Server", self._opc.ServerName)] + info_list += [("State", OPC_STATUS[self._opc.ServerState])] + info_list += [("Version", "%d.%d (Build %d)" % (self._opc.MajorVersion, self._opc.MinorVersion, self._opc.BuildNumber))] + try: + browser = self._opc.CreateBrowser() + 
browser_type = BROWSER_TYPE[browser.Organization] + except: + browser_type = "Not Supported" + + info_list += [("Browser", browser_type)] + info_list += [("Start Time", str(self._opc.StartTime))] + info_list += [("Current Time", str(self._opc.CurrentTime))] + info_list += [("Vendor", self._opc.VendorInfo)] + return info_list + except pythoncom.com_error as err: + try: + error_msg = "info: %s" % self._get_error_str(err) + raise OPCError(error_msg) + finally: + err = None + del err + + def ping(self): + """Check if we are still talking to the OPC server""" + try: + opc_serv_time = int(float(self._opc.CurrentTime) * 1000000.0) + if opc_serv_time == self._prev_serv_time: + return False + self._prev_serv_time = opc_serv_time + return True + except pythoncom.com_error: + return False + + def _get_error_str(self, err): + """Return the error string for a OPC or COM error code""" + hr, msg, exc, arg = err.args + if exc == None: + error_str = str(msg) + else: + scode = exc[5] + try: + opc_err_str = unicode(self._opc.GetErrorString(scode)).strip("\r\n") + except: + opc_err_str = None + + try: + com_err_str = unicode(pythoncom.GetScodeString(scode)).strip("\r\n") + except: + com_err_str = None + + if opc_err_str == None and com_err_str == None: + error_str = str(scode) + else: + if opc_err_str == com_err_str: + error_str = opc_err_str + else: + if opc_err_str == None: + error_str = com_err_str + else: + if com_err_str == None: + error_str = opc_err_str + else: + error_str = "%s (%s)" % (opc_err_str, com_err_str) + return error_str + + def _update_tx_time(self): + """Update the session's last transaction time in the Gateway Service""" + if self._open_serv: + self._open_serv._tx_times[self._open_guid] = time.time() + + def __getitem__(self, key): + """Read single item (tag as dictionary key)""" + value, quality, time = self.read(key) + return value + + def __setitem__(self, key, value): + """Write single item (tag as dictionary key)""" + self.write((key, value)) diff --git 
a/APPS_UNCOMPILED/lib/Pyro4/__init__.py b/APPS_UNCOMPILED/lib/Pyro4/__init__.py new file mode 100644 index 0000000..cf95197 --- /dev/null +++ b/APPS_UNCOMPILED/lib/Pyro4/__init__.py @@ -0,0 +1,7 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/Pyro4/__init__.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 2853 bytes diff --git a/APPS_UNCOMPILED/lib/Pyro4/configuration.py b/APPS_UNCOMPILED/lib/Pyro4/configuration.py new file mode 100644 index 0000000..a29f72d --- /dev/null +++ b/APPS_UNCOMPILED/lib/Pyro4/configuration.py @@ -0,0 +1,159 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/Pyro4/configuration.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 6892 bytes +""" +Configuration settings. + +Pyro - Python Remote Objects. Copyright by Irmen de Jong (irmen@razorvine.net). 
"""
Configuration settings.

Pyro - Python Remote Objects. Copyright by Irmen de Jong (irmen@razorvine.net).
"""
import os, platform, pickle, socket
# NOTE(review): the original module also does `from Pyro4 import constants`
# (used only in dump()) and `from __future__ import print_function`
# (a no-op on Python 3, which this decompiled bytecode targets).

class Configuration(object):
    """All tweakable Pyro settings, overridable through PYRO_* environment
    variables.  Every setting lives in __slots__; reset() installs the
    defaults."""
    __slots__ = ('HOST', 'NS_HOST', 'NS_PORT', 'NS_BCPORT', 'NS_BCHOST', 'NS_AUTOCLEAN',
                 'COMPRESSION', 'SERVERTYPE', 'COMMTIMEOUT', 'POLLTIMEOUT', 'ONEWAY_THREADED',
                 'DETAILED_TRACEBACK', 'SOCK_REUSE', 'SOCK_NODELAY', 'PREFER_IP_VERSION',
                 'THREADPOOL_SIZE', 'THREADPOOL_SIZE_MIN', 'AUTOPROXY', 'PICKLE_PROTOCOL_VERSION',
                 'BROADCAST_ADDRS', 'NATHOST', 'NATPORT', 'MAX_MESSAGE_SIZE', 'FLAME_ENABLED',
                 'SERIALIZER', 'SERIALIZERS_ACCEPTED', 'LOGWIRE', 'METADATA', 'REQUIRE_EXPOSE',
                 'USE_MSG_WAITALL', 'JSON_MODULE', 'MAX_RETRIES', 'DILL_PROTOCOL_VERSION',
                 'ITER_STREAMING', 'ITER_STREAM_LIFETIME', 'ITER_STREAM_LINGER',
                 'SSL', 'SSL_REQUIRECLIENTCERT', 'SSL_CACERTS', 'SSL_SERVERCERT',
                 'SSL_SERVERKEY', 'SSL_SERVERKEYPASSWD', 'SSL_CLIENTCERT', 'SSL_CLIENTKEY',
                 'SSL_CLIENTKEYPASSWD')

    def __init__(self):
        self.reset()

    def reset(self, useenvironment=True):
        """
        Install the default value for every config item.
        When useenvironment is False, PYRO_* environment variables are
        ignored (useful if the environment can't be trusted).
        """
        # -- networking / name server defaults --
        self.HOST = "localhost"
        self.NS_HOST = self.HOST
        self.NS_PORT = 9090
        self.NS_BCPORT = 9091
        self.NS_BCHOST = None
        self.NS_AUTOCLEAN = 0.0
        self.NATHOST = None
        self.NATPORT = 0
        self.COMPRESSION = False
        self.SERVERTYPE = "thread"
        self.COMMTIMEOUT = 0.0
        self.POLLTIMEOUT = 2.0
        self.SOCK_REUSE = True
        self.SOCK_NODELAY = False
        self.ONEWAY_THREADED = True
        self.DETAILED_TRACEBACK = False
        self.THREADPOOL_SIZE = 40
        self.THREADPOOL_SIZE_MIN = 4
        self.AUTOPROXY = True
        self.MAX_MESSAGE_SIZE = 0
        # NOTE(review): upstream Pyro4 ships "<broadcast>, 0.0.0.0" here;
        # the leading ", " suggests a token was lost in decompilation --
        # kept as-is to preserve observed behavior, but worth confirming.
        self.BROADCAST_ADDRS = ", 0.0.0.0"
        self.FLAME_ENABLED = False
        self.PREFER_IP_VERSION = 4
        # -- serialization defaults --
        self.SERIALIZER = "serpent"
        self.SERIALIZERS_ACCEPTED = "serpent,marshal,json"
        self.LOGWIRE = False
        self.PICKLE_PROTOCOL_VERSION = pickle.HIGHEST_PROTOCOL
        try:
            import dill
            self.DILL_PROTOCOL_VERSION = dill.HIGHEST_PROTOCOL
        except ImportError:
            # dill is optional; -1 marks it unavailable
            self.DILL_PROTOCOL_VERSION = -1

        self.METADATA = True
        self.REQUIRE_EXPOSE = True
        self.USE_MSG_WAITALL = hasattr(socket, "MSG_WAITALL") and platform.system() != "Windows"
        self.JSON_MODULE = "json"
        self.MAX_RETRIES = 0
        self.ITER_STREAMING = True
        self.ITER_STREAM_LIFETIME = 0.0
        self.ITER_STREAM_LINGER = 30.0
        # -- SSL defaults --
        self.SSL = False
        self.SSL_SERVERCERT = ""
        self.SSL_SERVERKEY = ""
        self.SSL_SERVERKEYPASSWD = ""
        self.SSL_REQUIRECLIENTCERT = False
        self.SSL_CLIENTCERT = ""
        self.SSL_CLIENTKEY = ""
        self.SSL_CLIENTKEYPASSWD = ""
        self.SSL_CACERTS = ""
        if useenvironment:
            # Environment overrides: PYRO_<SETTING>=value, coerced to the
            # type of the default value (with special bool handling).
            PREFIX = "PYRO_"
            for symbol in self.__slots__:
                envname = PREFIX + symbol
                if envname not in os.environ:
                    continue
                value = getattr(self, symbol)
                envvalue = os.environ[envname]
                if value is not None:
                    valuetype = type(value)
                    if valuetype is bool:
                        envvalue = envvalue.lower()
                        if envvalue in ('0', 'off', 'no', 'false'):
                            envvalue = False
                        elif envvalue in ('1', 'yes', 'on', 'true'):
                            envvalue = True
                        else:
                            raise ValueError("invalid boolean value: %s%s=%s" % (PREFIX, symbol, envvalue))
                    else:
                        envvalue = valuetype(envvalue)
                setattr(self, symbol, envvalue)

        # The accepted-serializers string becomes a set for fast lookups.
        self.SERIALIZERS_ACCEPTED = set(self.SERIALIZERS_ACCEPTED.split(","))

    def asDict(self):
        """returns the current config as a regular dictionary"""
        return {name: getattr(self, name) for name in self.__slots__}

    def parseAddressesString(self, addresses):
        """
        Parses the addresses string which contains one or more ip addresses separated by a comma.
        Returns a sequence of these addresses. '' is replaced by the empty string.
        """
        parsed = []
        for chunk in addresses.split(","):
            chunk = chunk.strip()
            parsed.append("" if chunk == "''" else chunk)
        return parsed

    def dump(self):
        """Return a human-readable overview of the active configuration."""
        if hasattr(platform, "python_implementation"):
            implementation = platform.python_implementation()
        else:
            implementation = "???"
        settings = self.asDict()
        settings["LOGFILE"] = os.environ.get("PYRO_LOGFILE")
        settings["LOGLEVEL"] = os.environ.get("PYRO_LOGLEVEL")
        lines = ["Pyro version: %s" % constants.VERSION,
                 "Loaded from: %s" % os.path.dirname(__file__),
                 "Python version: %s %s (%s, %s)" % (implementation, platform.python_version(), platform.system(), os.name),
                 "Protocol version: %d" % constants.PROTOCOL_VERSION,
                 "Currently active configuration settings:"]
        lines.extend("%s = %s" % item for item in sorted(settings.items()))
        return "\n".join(lines)


config = Configuration()

def main():
    print(config.dump())


if __name__ == "__main__":
    main()
Size of source mod 2**32: 497 bytes +""" +Definitions of various hard coded constants. + +Pyro - Python Remote Objects. Copyright by Irmen de Jong (irmen@razorvine.net). +""" +VERSION = "4.82" +DAEMON_NAME = "Pyro.Daemon" +NAMESERVER_NAME = "Pyro.NameServer" +FLAME_NAME = "Pyro.Flame" +PROTOCOL_VERSION = 48 diff --git a/APPS_UNCOMPILED/lib/Pyro4/core.py b/APPS_UNCOMPILED/lib/Pyro4/core.py new file mode 100644 index 0000000..2424663 --- /dev/null +++ b/APPS_UNCOMPILED/lib/Pyro4/core.py @@ -0,0 +1,1971 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/Pyro4/core.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 99271 bytes +""" +Core logic (uri, daemon, proxy stuff). + +Pyro - Python Remote Objects. Copyright by Irmen de Jong (irmen@razorvine.net). +""" +from __future__ import print_function, division +import inspect, re, logging, sys, ssl, os, time, threading, uuid, base64, warnings, socket, random +from Pyro4 import errors, socketutil, util, constants, message, futures +from Pyro4.configuration import config +__all__ = [ + 'URI', 'Proxy', 'Daemon', 'current_context', 'callback', 'batch', 'asyncproxy', + 'expose', 'behavior', + 'oneway', 'SerializedBlob', '_resolve', '_locateNS'] +if sys.version_info >= (3, 0): + basestring = str +log = logging.getLogger("Pyro4.core") +print_debug_log = None + +class URI(object): + __doc__ = "\n Pyro object URI (universal resource identifier).\n The uri format is like this: ``PYRO:objectid@location`` where location is one of:\n\n - ``hostname:port`` (tcp/ip socket on given port)\n - ``./u:sockname`` (Unix domain socket on localhost)\n\n There is also a 'Magic format' for simple name resolution using Name server:\n ``PYRONAME:objectname[@location]`` (optional name server location, can also omit location port)\n And one that 
looks up things in the name server by metadata:\n ``PYROMETA:meta1,meta2,...[@location]`` (optional name server location, can also omit location port)\n\n You can write the protocol in lowercase if you like (``pyro:...``) but it will\n automatically be converted to uppercase internally.\n " + uriRegEx = re.compile("(?P[Pp][Yy][Rr][Oo][a-zA-Z]*):(?P\\S+?)(@(?P.+))?$") + + def __init__(self, uri): + if isinstance(uri, URI): + state = uri.__getstate__() + self.__setstate__(state) + return + if not isinstance(uri, basestring): + raise TypeError("uri parameter object is of wrong type") + else: + self.sockname = self.host = self.port = None + match = self.uriRegEx.match(uri) + if not match: + raise errors.PyroError("invalid uri") + else: + self.protocol = match.group("protocol").upper() + self.object = match.group("object") + location = match.group("location") + if self.protocol == "PYRONAME": + self._parseLocation(location, config.NS_PORT) + else: + if self.protocol == "PYRO": + if not location: + raise errors.PyroError("invalid uri") + self._parseLocation(location, None) + else: + if self.protocol == "PYROMETA": + self.object = set((m.strip() for m in self.object.split(","))) + self._parseLocation(location, config.NS_PORT) + else: + raise errors.PyroError("invalid uri (protocol)") + + def _parseLocation(self, location, defaultPort): + if not location: + return + if location.startswith("./u:"): + self.sockname = location[4[:None]] + if not self.sockname or ":" in self.sockname: + raise errors.PyroError("invalid uri (location)") + else: + if location.startswith("["): + if location.startswith("[["): + raise errors.PyroError("invalid ipv6 address: enclosed in too many brackets") + ipv6locationmatch = re.match("\\[([0-9a-fA-F:%]+)](:(\\d+))?", location) + if not ipv6locationmatch: + raise errors.PyroError("invalid ipv6 address: the part between brackets must be a numeric ipv6 address") + self.host, _, self.port = ipv6locationmatch.groups() + else: + self.host, _, self.port 
= location.partition(":") + if not self.port: + self.port = defaultPort + try: + self.port = int(self.port) + except (ValueError, TypeError): + raise errors.PyroError("invalid port in uri, port=" + str(self.port)) + + @staticmethod + def isUnixsockLocation(location): + """determine if a location string is for a Unix domain socket""" + return location.startswith("./u:") + + @property + def location(self): + """property containing the location string, for instance ``"servername.you.com:5555"``""" + if self.host: + if ":" in self.host: + return "[%s]:%d" % (self.host, self.port) + return "%s:%d" % (self.host, self.port) + else: + if self.sockname: + return "./u:" + self.sockname + return + + def asString(self): + """the string representation of this object""" + if self.protocol == "PYROMETA": + result = "PYROMETA:" + ",".join(self.object) + else: + result = self.protocol + ":" + self.object + location = self.location + if location: + result += "@" + location + return result + + def __str__(self): + string = self.asString() + if sys.version_info < (3, 0): + if type(string) is unicode: + return string.encode("ascii", "replace") + return string + + def __unicode__(self): + return self.asString() + + def __repr__(self): + return "<%s.%s at 0x%x; %s>" % (self.__class__.__module__, self.__class__.__name__, id(self), str(self)) + + def __eq__(self, other): + if not isinstance(other, URI): + return False + return ( + self.protocol, self.object, self.sockname, self.host, self.port) == ( + other.protocol, other.object, other.sockname, other.host, other.port) + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return hash((self.protocol, str(self.object), self.sockname, self.host, self.port)) + + def __getstate__(self): + return ( + self.protocol, self.object, self.sockname, self.host, self.port) + + def __setstate__(self, state): + self.protocol, self.object, self.sockname, self.host, self.port = state + + def __getstate_for_dict__(self): + 
return self.__getstate__() + + def __setstate_from_dict__(self, state): + self.__setstate__(state) + + +class _RemoteMethod(object): + __doc__ = "method call abstraction" + + def __init__(self, send, name, max_retries): + self._RemoteMethod__send = send + self._RemoteMethod__name = name + self._RemoteMethod__max_retries = max_retries + + def __getattr__(self, name): + return _RemoteMethod(self._RemoteMethod__send, "%s.%s" % (self._RemoteMethod__name, name), self._RemoteMethod__max_retries) + + def __call__(self, *args, **kwargs): + for attempt in range(self._RemoteMethod__max_retries + 1): + try: + return self._RemoteMethod__send(self._RemoteMethod__name, args, kwargs) + except (errors.ConnectionClosedError, errors.TimeoutError): + if attempt >= self._RemoteMethod__max_retries: + raise + + +class Proxy(object): + __doc__ = "\n Pyro proxy for a remote object. Intercepts method calls and dispatches them to the remote object.\n\n .. automethod:: _pyroBind\n .. automethod:: _pyroRelease\n .. automethod:: _pyroReconnect\n .. automethod:: _pyroBatch\n .. automethod:: _pyroAsync\n .. automethod:: _pyroAnnotations\n .. automethod:: _pyroResponseAnnotations\n .. automethod:: _pyroValidateHandshake\n .. autoattribute:: _pyroTimeout\n .. autoattribute:: _pyroHmacKey\n .. attribute:: _pyroMaxRetries\n\n Number of retries to perform on communication calls by this proxy, allows you to override the default setting.\n\n .. attribute:: _pyroSerializer\n\n Name of the serializer to use by this proxy, allows you to override the default setting.\n\n .. attribute:: _pyroHandshake\n\n The data object that should be sent in the initial connection handshake message. 
Can be any serializable object.\n " + _Proxy__pyroAttributes = frozenset([ + '__getnewargs__', '__getnewargs_ex__', '__getinitargs__', '_pyroConnection', + '_pyroUri', + '_pyroOneway', '_pyroMethods', '_pyroAttrs', + '_pyroTimeout', '_pyroSeq', '_pyroHmacKey', + '_pyroRawWireResponse', + '_pyroHandshake', '_pyroMaxRetries', '_pyroSerializer', '_Proxy__async', + '_Proxy__pyroHmacKey', + '_Proxy__pyroTimeout', '_Proxy__pyroConnLock']) + + def __init__(self, uri, connected_socket=None): + if connected_socket: + uri = URI("PYRO:" + uri + "@<>:0") + if isinstance(uri, basestring): + uri = URI(uri) + else: + if not isinstance(uri, URI): + raise TypeError("expected Pyro URI") + self._pyroUri = uri + self._pyroConnection = None + self._pyroSerializer = None + self._pyroMethods = set() + self._pyroAttrs = set() + self._pyroOneway = set() + self._pyroSeq = 0 + self._pyroRawWireResponse = False + self._pyroHandshake = "hello" + self._pyroMaxRetries = config.MAX_RETRIES + self._Proxy__pyroHmacKey = None + self._Proxy__pyroTimeout = config.COMMTIMEOUT + self._Proxy__pyroConnLock = threading.RLock() + util.get_serializer(config.SERIALIZER) + self._Proxy__async = False + current_context.annotations = {} + current_context.response_annotations = {} + if connected_socket: + self._Proxy__pyroCreateConnection(False, connected_socket) + + @property + def _pyroHmacKey(self): + """the HMAC key (bytes) that this proxy uses""" + return self._Proxy__pyroHmacKey + + @_pyroHmacKey.setter + def _pyroHmacKey(self, value): + if value: + if sys.version_info >= (3, 0): + if type(value) is not bytes: + value = value.encode("utf-8") + self._Proxy__pyroHmacKey = value + + def __del__(self): + if hasattr(self, "_pyroConnection"): + self._pyroRelease() + + def __getattr__(self, name): + if name in Proxy._Proxy__pyroAttributes: + raise AttributeError(name) + elif config.METADATA: + if not self._pyroMethods: + if not self._pyroAttrs: + self._pyroGetMetadata() + if name in self._pyroAttrs: + return 
self._pyroInvoke("__getattr__", (name,), None) + if config.METADATA and name not in self._pyroMethods: + raise AttributeError("remote object '%s' has no exposed attribute or method '%s'" % (self._pyroUri, name)) + if self._Proxy__async: + return _AsyncRemoteMethod(self, name, self._pyroMaxRetries) + return _RemoteMethod(self._pyroInvoke, name, self._pyroMaxRetries) + + def __setattr__(self, name, value): + if name in Proxy._Proxy__pyroAttributes: + return super(Proxy, self).__setattr__(name, value) + if config.METADATA: + if not self._pyroMethods: + if not self._pyroAttrs: + self._pyroGetMetadata() + if name in self._pyroAttrs: + return self._pyroInvoke("__setattr__", (name, value), None) + if config.METADATA: + raise AttributeError("remote object '%s' has no exposed attribute '%s'" % (self._pyroUri, name)) + return super(Proxy, self).__setattr__(name, value) + + def __repr__(self): + if self._pyroConnection: + connected = "connected " + self._pyroConnection.family() + else: + connected = "not connected" + return "<%s.%s at 0x%x; %s; for %s>" % (self.__class__.__module__, self.__class__.__name__, + id(self), connected, self._pyroUri) + + def __unicode__(self): + return str(self) + + def __getstate_for_dict__(self): + encodedHmac = None + if self._pyroHmacKey is not None: + encodedHmac = "b64:" + base64.b64encode(self._pyroHmacKey).decode("ascii") + return (self._pyroUri.asString(), tuple(self._pyroOneway), tuple(self._pyroMethods), tuple(self._pyroAttrs), + self._Proxy__pyroTimeout, encodedHmac, self._pyroHandshake, self._pyroMaxRetries, self._pyroSerializer) + + def __setstate_from_dict__(self, state): + uri = URI(state[0]) + oneway = set(state[1]) + methods = set(state[2]) + attrs = set(state[3]) + timeout = state[4] + hmac_key = state[5] + handshake = state[6] + max_retries = state[7] + serializer = None if len(state) < 9 else state[8] + if hmac_key: + if hmac_key.startswith("b64:"): + hmac_key = base64.b64decode(hmac_key[4[:None]].encode("ascii")) + else: + 
raise errors.ProtocolError("hmac encoding error") + self.__setstate__((uri, oneway, methods, attrs, timeout, hmac_key, handshake, max_retries, serializer)) + + def __getstate__(self): + return ( + self._pyroUri, self._pyroOneway, self._pyroMethods, self._pyroAttrs, self._Proxy__pyroTimeout, + self._pyroHmacKey, self._pyroHandshake, self._pyroMaxRetries, self._pyroSerializer) + + def __setstate__(self, state): + self._pyroUri, self._pyroOneway, self._pyroMethods, self._pyroAttrs, _, self._pyroHmacKey, self._pyroHandshake = state[None[:7]] + self._pyroSerializer = None if len(state) < 9 else state[8] + self._Proxy__pyroTimeout = config.COMMTIMEOUT + self._pyroMaxRetries = config.MAX_RETRIES + self._pyroConnection = None + self._pyroSeq = 0 + self._pyroRawWireResponse = False + self._Proxy__pyroConnLock = threading.RLock() + self._Proxy__async = False + + def __copy__(self): + uriCopy = URI(self._pyroUri) + p = type(self)(uriCopy) + p._pyroOneway = set(self._pyroOneway) + p._pyroMethods = set(self._pyroMethods) + p._pyroAttrs = set(self._pyroAttrs) + p._pyroSerializer = self._pyroSerializer + p._pyroTimeout = self._pyroTimeout + p._pyroHandshake = self._pyroHandshake + p._pyroHmacKey = self._pyroHmacKey + p._pyroRawWireResponse = self._pyroRawWireResponse + p._pyroMaxRetries = self._pyroMaxRetries + p._Proxy__async = self._Proxy__async + return p + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self._pyroRelease() + + def __eq__(self, other): + if other is self: + return True + return isinstance(other, Proxy) and other._pyroUri == self._pyroUri + + def __ne__(self, other): + if other: + if isinstance(other, Proxy): + return other._pyroUri != self._pyroUri + return True + + def __hash__(self): + return hash(self._pyroUri) + + def __dir__(self): + result = dir(self.__class__) + list(self.__dict__.keys()) + return sorted(set(result) | self._pyroMethods | self._pyroAttrs) + + def _pyroRelease(self): + """release the 
connection to the pyro daemon""" + with self._Proxy__pyroConnLock: + if self._pyroConnection is not None: + if self._pyroConnection.keep_open: + return + self._pyroConnection.close() + self._pyroConnection = None + log.debug("connection released") + + def _pyroBind(self): + """ + Bind this proxy to the exact object from the uri. That means that the proxy's uri + will be updated with a direct PYRO uri, if it isn't one yet. + If the proxy is already bound, it will not bind again. + """ + return self._Proxy__pyroCreateConnection(True) + + def __pyroGetTimeout(self): + return self._Proxy__pyroTimeout + + def __pyroSetTimeout(self, timeout): + self._Proxy__pyroTimeout = timeout + if self._pyroConnection is not None: + self._pyroConnection.timeout = timeout + + _pyroTimeout = property(_Proxy__pyroGetTimeout, _Proxy__pyroSetTimeout, doc="\n The timeout in seconds for calls on this proxy. Defaults to ``None``.\n If the timeout expires before the remote method call returns,\n Pyro will raise a :exc:`Pyro4.errors.TimeoutError`") + + def opcda_set_debug_log(self, print_debug_log_callback): + global print_debug_log + print_debug_log = print_debug_log_callback + + def _pyroInvoke(self, methodname, vargs, kwargs, flags=0, objectId=None): + """perform the remote method call communication""" + current_context.response_annotations = {} + with self._Proxy__pyroConnLock: + if self._pyroConnection is None: + self._Proxy__pyroCreateConnection() + else: + serializer = util.get_serializer(self._pyroSerializer or config.SERIALIZER) + objectId = objectId or self._pyroConnection.objectId + annotations = self._Proxy__annotations() + if vargs and isinstance(vargs[0], SerializedBlob): + data, compressed, flags = self._Proxy__serializeBlobArgs(vargs, kwargs, annotations, flags, objectId, methodname, serializer) + else: + data, compressed = serializer.serializeCall(objectId, methodname, vargs, kwargs, compress=(config.COMPRESSION)) + if compressed: + flags |= message.FLAGS_COMPRESSED + if 
methodname in self._pyroOneway: + flags |= message.FLAGS_ONEWAY + self._pyroSeq = self._pyroSeq + 1 & 65535 + msg = message.Message((message.MSG_INVOKE), data, (serializer.serializer_id), flags, (self._pyroSeq), annotations=annotations, + hmac_key=(self._pyroHmacKey)) + if config.LOGWIRE: + _log_wiredata(log, "proxy wiredata sending", msg) + try: + self._pyroConnection.send(msg.to_bytes()) + if print_debug_log: + print_debug_log(1, " ".join(("%02x" % msg.to_bytes()[i] for i in range(len(msg.to_bytes()))))) + else: + del msg + if flags & message.FLAGS_ONEWAY: + return + msg = message.Message.recv((self._pyroConnection), [message.MSG_RESULT], hmac_key=(self._pyroHmacKey)) + if print_debug_log: + print_debug_log(0, " ".join(("%02x" % msg.to_bytes()[i] for i in range(len(msg.to_bytes()))))) + if config.LOGWIRE: + _log_wiredata(log, "proxy wiredata received", msg) + self._Proxy__pyroCheckSequence(msg.seq) + if msg.serializer_id != serializer.serializer_id: + error = "invalid serializer in response: %d" % msg.serializer_id + log.error(error) + raise errors.SerializeError(error) + if msg.annotations: + current_context.response_annotations = msg.annotations + self._pyroResponseAnnotations(msg.annotations, msg.type) + if self._pyroRawWireResponse: + msg.decompress_if_needed() + return msg + data = serializer.deserializeData((msg.data), compressed=(msg.flags & message.FLAGS_COMPRESSED)) + if msg.flags & message.FLAGS_ITEMSTREAMRESULT: + streamId = bytes(msg.annotations.get("STRM", b'')).decode() + if not streamId: + raise errors.ProtocolError("result of call is an iterator, but the server is not configured to allow streaming") + return _StreamResultIterator(streamId, self) + if msg.flags & message.FLAGS_EXCEPTION: + if sys.platform == "cli": + util.fixIronPythonExceptionForPickle(data, False) + raise data + else: + return data + except (errors.CommunicationError, KeyboardInterrupt): + self._pyroRelease() + raise + + def __pyroCheckSequence(self, seq): + if seq != 
self._pyroSeq: + err = "invoke: reply sequence out of sync, got %d expected %d" % (seq, self._pyroSeq) + log.error(err) + raise errors.ProtocolError(err) + + def __pyroCreateConnection(self, replaceUri=False, connected_socket=None): + """ + Connects this proxy to the remote Pyro daemon. Does connection handshake. + Returns true if a new connection was made, false if an existing one was already present. + """ + + def connect_and_handshake(conn): + try: + if self._pyroConnection is not None: + return False + else: + if config.SSL: + sslContext = socketutil.getSSLcontext(clientcert=(config.SSL_CLIENTCERT), clientkey=(config.SSL_CLIENTKEY), + keypassword=(config.SSL_CLIENTKEYPASSWD), + cacerts=(config.SSL_CACERTS)) + else: + sslContext = None + sock = socketutil.createSocket(connect=connect_location, reuseaddr=(config.SOCK_REUSE), + timeout=(self._Proxy__pyroTimeout), + nodelay=(config.SOCK_NODELAY), + sslContext=sslContext) + conn = socketutil.SocketConnection(sock, uri.object) + serializer = util.get_serializer(self._pyroSerializer or config.SERIALIZER) + data = {"handshake": (self._pyroHandshake)} + if config.METADATA: + data["object"] = uri.object + flags = message.FLAGS_META_ON_CONNECT + else: + flags = 0 + data, compressed = serializer.serializeData(data, config.COMPRESSION) + if compressed: + flags |= message.FLAGS_COMPRESSED + msg = message.Message((message.MSG_CONNECT), data, (serializer.serializer_id), flags, (self._pyroSeq), annotations=(self._Proxy__annotations(False)), + hmac_key=(self._pyroHmacKey)) + if config.LOGWIRE: + _log_wiredata(log, "proxy connect sending", msg) + conn.send(msg.to_bytes()) + msg = message.Message.recv(conn, [message.MSG_CONNECTOK, message.MSG_CONNECTFAIL], hmac_key=(self._pyroHmacKey)) + if config.LOGWIRE: + _log_wiredata(log, "proxy connect response received", msg) + except Exception as x: + try: + if conn: + conn.close() + err = "cannot connect to %s: %s" % (connect_location, x) + log.error(err) + if isinstance(x, 
errors.CommunicationError): + raise + else: + ce = errors.CommunicationError(err) + if sys.version_info >= (3, 0): + ce.__cause__ = x + raise ce + finally: + x = None + del x + + else: + handshake_response = "?" + if msg.data: + serializer = util.get_serializer_by_id(msg.serializer_id) + handshake_response = serializer.deserializeData((msg.data), compressed=(msg.flags & message.FLAGS_COMPRESSED)) + if msg.type == message.MSG_CONNECTFAIL: + if sys.version_info < (3, 0): + error = "connection to %s rejected: %s" % (connect_location, handshake_response.decode()) + else: + error = "connection to %s rejected: %s" % (connect_location, handshake_response) + conn.close() + log.error(error) + raise errors.CommunicationError(error) + else: + if msg.type == message.MSG_CONNECTOK: + if msg.flags & message.FLAGS_META_ON_CONNECT: + self._Proxy__processMetadata(handshake_response["meta"]) + handshake_response = handshake_response["handshake"] + self._pyroConnection = conn + if replaceUri: + self._pyroUri = uri + self._pyroValidateHandshake(handshake_response) + log.debug("connected to %s - %s - %s", self._pyroUri, conn.family(), "SSL" if sslContext else "unencrypted") + if msg.annotations: + self._pyroResponseAnnotations(msg.annotations, msg.type) + else: + conn.close() + err = "cannot connect to %s: invalid msg type %d received" % (connect_location, msg.type) + log.error(err) + raise errors.ProtocolError(err) + + with self._Proxy__pyroConnLock: + if self._pyroConnection is not None: + return False + if connected_socket: + if config.SSL: + if not isinstance(connected_socket, ssl.SSLSocket): + raise socket.error("SSL configured for Pyro but existing socket is not a SSL socket") + uri = self._pyroUri + else: + uri = _resolve(self._pyroUri, self._pyroHmacKey) + conn = None + log.debug("connecting to %s", uri) + connect_location = uri.sockname or (uri.host, uri.port) + if connected_socket: + self._pyroConnection = socketutil.SocketConnection(connected_socket, uri.object, True) + 
else: + connect_and_handshake(conn) + if config.METADATA: + if self._pyroMethods or self._pyroAttrs: + log.debug("reusing existing metadata") + else: + self._pyroGetMetadata(uri.object) + return True + + def _pyroGetMetadata(self, objectId=None, known_metadata=None): + """ + Get metadata from server (methods, attrs, oneway, ...) and remember them in some attributes of the proxy. + Usually this will already be known due to the default behavior of the connect handshake, where the + connect response also includes the metadata. + """ + objectId = objectId or self._pyroUri.object + log.debug("getting metadata for object %s", objectId) + if self._pyroConnection is None: + if not known_metadata: + try: + self._Proxy__pyroCreateConnection() + except errors.PyroError: + log.error("problem getting metadata: cannot connect") + raise + + if not self._pyroMethods: + if self._pyroAttrs: + return + try: + result = known_metadata or self._pyroInvoke("get_metadata", [objectId], {}, objectId=(constants.DAEMON_NAME)) + self._Proxy__processMetadata(result) + except errors.PyroError: + log.exception("problem getting metadata") + raise + + def __processMetadata(self, metadata): + if not metadata: + return + self._pyroOneway = set(metadata["oneway"]) + self._pyroMethods = set(metadata["methods"]) + self._pyroAttrs = set(metadata["attrs"]) + if log.isEnabledFor(logging.DEBUG): + log.debug("from meta: methods=%s, oneway methods=%s, attributes=%s", sorted(self._pyroMethods), sorted(self._pyroOneway), sorted(self._pyroAttrs)) + elif not self._pyroMethods: + if not self._pyroAttrs: + raise errors.PyroError("remote object doesn't expose any methods or attributes. Did you forget setting @expose on them?") + + def _pyroReconnect(self, tries=100000000): + """ + (Re)connect the proxy to the daemon containing the pyro object which the proxy is for. 
+ In contrast to the _pyroBind method, this one first releases the connection (if the proxy is still connected) + and retries making a new connection until it succeeds or the given amount of tries ran out. + """ + self._pyroRelease() + while tries: + try: + self._Proxy__pyroCreateConnection() + return + except errors.CommunicationError: + tries -= 1 + if tries: + time.sleep(2) + + msg = "failed to reconnect" + log.error(msg) + raise errors.ConnectionClosedError(msg) + + def _pyroBatch(self): + """returns a helper class that lets you create batched method calls on the proxy""" + return _BatchProxyAdapter(self) + + def _pyroAsync(self, asynchronous=True): + """turns the proxy into asynchronous mode so you can do asynchronous method calls, + or sets it back to normal sync mode if you set asynchronous=False. + This setting is strictly on a per-proxy basis (unless an exact clone is made + via copy.copy).""" + self._Proxy__async = asynchronous + + if sys.version_info < (3, 7): + _pyroAsync_37 = _pyroAsync + + def _pyroAsync(self, asynchronous=True, **kwargs): + if kwargs: + kword = list(kwargs.keys()) + if kword != ["async"]: + raise TypeError("_pyroAsync() got an unexpected keyword argument '{:s}'".format(kword[0])) + asynchronous = kwargs["async"] + return Proxy._pyroAsync_37(self, asynchronous) + + def _pyroInvokeBatch(self, calls, oneway=False): + flags = message.FLAGS_BATCH + if oneway: + flags |= message.FLAGS_ONEWAY + return self._pyroInvoke("", calls, None, flags) + + def _pyroAnnotations(self): + """ + Override to return a dict with custom user annotations to be sent with each request message. + Code using Pyro 4.56 or newer can skip this and instead set the annotations directly on the context object. + """ + return {} + + def _pyroResponseAnnotations(self, annotations, msgtype): + """ + Process any response annotations (dictionary set by the daemon). 
+ Usually this contains the internal Pyro annotations such as hmac and correlation id, + and if you override the annotations method in the daemon, can contain your own annotations as well. + Code using Pyro 4.56 or newer can skip this and instead read the response_annotations directly from the context object. + """ + pass + + def _pyroValidateHandshake(self, response): + """ + Process and validate the initial connection handshake response data received from the daemon. + Simply return without error if everything is ok. + Raise an exception if something is wrong and the connection should not be made. + """ + pass + + def __annotations(self, clear=True): + annotations = current_context.annotations + if current_context.correlation_id: + annotations["CORR"] = current_context.correlation_id.bytes + else: + annotations.pop("CORR", None) + annotations.update(self._pyroAnnotations()) + if clear: + current_context.annotations = {} + return annotations + + def __serializeBlobArgs(self, vargs, kwargs, annotations, flags, objectId, methodname, serializer): + """ + Special handling of a "blob" argument that has to stay serialized until explicitly deserialized in client code. + This makes efficient, transparent gateways or dispatchers and such possible: + they don't have to de/reserialize the message and are independent from the serialized class definitions. + Annotations are passed in because some blob metadata is added. They're not part of the blob itself. 
+ """ + if len(vargs) > 1 or kwargs: + raise errors.SerializeError("if SerializedBlob is used, it must be the only argument") + else: + blob = vargs[0] + flags |= message.FLAGS_KEEPSERIALIZED + import marshal + annotations["BLBI"] = marshal.dumps((blob.info, objectId, methodname)) + if blob._contains_blob: + protocol_msg = blob._data + data, compressed = protocol_msg.data, protocol_msg.flags & message.FLAGS_COMPRESSED + else: + data, compressed = serializer.serializeCall(objectId, methodname, (blob._data), kwargs, compress=(config.COMPRESSION)) + return ( + data, compressed, flags) + + +class _StreamResultIterator(object): + __doc__ = "\n Pyro returns this as a result of a remote call which returns an iterator or generator.\n It is a normal iterable and produces elements on demand from the remote iterator.\n You can simply use it in for loops, list comprehensions etc.\n " + + def __init__(self, streamId, proxy): + self.streamId = streamId + self.proxy = proxy + self.pyroseq = proxy._pyroSeq + + def __iter__(self): + return self + + def next(self): + return self.__next__() + + def __next__(self): + if self.proxy is None: + raise StopIteration + elif self.proxy._pyroConnection is None: + raise errors.ConnectionClosedError("the proxy for this stream result has been closed") + self.pyroseq += 1 + try: + return self.proxy._pyroInvoke("get_next_stream_item", [self.streamId], {}, objectId=(constants.DAEMON_NAME)) + except (StopIteration, GeneratorExit): + self.proxy = None + raise + + def __del__(self): + self.close() + + def close(self): + if self.proxy: + if self.proxy._pyroConnection is not None: + if self.pyroseq == self.proxy._pyroSeq: + self.proxy._pyroInvoke("close_stream", [self.streamId], {}, flags=(message.FLAGS_ONEWAY), + objectId=(constants.DAEMON_NAME)) + else: + try: + with self.proxy.__copy__() as closingProxy: + closingProxy._pyroInvoke("close_stream", [self.streamId], {}, flags=(message.FLAGS_ONEWAY), + objectId=(constants.DAEMON_NAME)) + except 
errors.CommunicationError: + pass + + self.proxy = None + + +class _BatchedRemoteMethod(object): + __doc__ = "method call abstraction that is used with batched calls" + + def __init__(self, calls, name): + self._BatchedRemoteMethod__calls = calls + self._BatchedRemoteMethod__name = name + + def __getattr__(self, name): + return _BatchedRemoteMethod(self._BatchedRemoteMethod__calls, "%s.%s" % (self._BatchedRemoteMethod__name, name)) + + def __call__(self, *args, **kwargs): + self._BatchedRemoteMethod__calls.append((self._BatchedRemoteMethod__name, args, kwargs)) + + +class _BatchProxyAdapter(object): + __doc__ = "Helper class that lets you batch multiple method calls into one.\n It is constructed with a reference to the normal proxy that will\n carry out the batched calls. Call methods on this object that you want to batch,\n and finally call the batch proxy itself. That call will return a generator\n for the results of every method call in the batch (in sequence)." + + def __init__(self, proxy): + self._BatchProxyAdapter__proxy = proxy + self._BatchProxyAdapter__calls = [] + + def __getattr__(self, name): + return _BatchedRemoteMethod(self._BatchProxyAdapter__calls, name) + + def __enter__(self): + return self + + def __exit__(self, *args): + pass + + def __copy__(self): + copy = type(self)(self._BatchProxyAdapter__proxy) + copy._BatchProxyAdapter__calls = list(self._BatchProxyAdapter__calls) + return copy + + def __resultsgenerator(self, results): + for result in results: + if isinstance(result, futures._ExceptionWrapper): + result.raiseIt() + else: + yield result + + def __call__(self, oneway=False, asynchronous=False): + if oneway: + if asynchronous: + raise errors.PyroError("async oneway calls make no sense") + elif asynchronous: + return _AsyncRemoteMethod(self, "", self._BatchProxyAdapter__proxy._pyroMaxRetries)() + results = self._BatchProxyAdapter__proxy._pyroInvokeBatch(self._BatchProxyAdapter__calls, oneway) + self._BatchProxyAdapter__calls = [] + return 
oneway or self._BatchProxyAdapter__resultsgenerator(results) + + if sys.version_info < (3, 7): + call_37 = __call__ + + def __call__(self, oneway=False, **kwargs): + if kwargs: + kword = list(kwargs.keys()) + if kword != ["async"]: + if kword != ["asynchronous"]: + raise TypeError("__call__() got an unexpected keyword argument '{:s}'".format(kword[0])) + if kword == ["async"]: + kwargs = {"asynchronous": (kwargs["async"])} + kwargs["oneway"] = oneway + return (_BatchProxyAdapter.call_37)(self, **kwargs) + + def _pyroInvoke(self, name, args, kwargs): + results = self._BatchProxyAdapter__proxy._pyroInvokeBatch(self._BatchProxyAdapter__calls) + self._BatchProxyAdapter__calls = [] + return self._BatchProxyAdapter__resultsgenerator(results) + + +class _AsyncRemoteMethod(object): + __doc__ = "asynchronous method call abstraction (call will run in a background thread)" + + def __init__(self, proxy, name, max_retries): + self._AsyncRemoteMethod__proxy = proxy + self._AsyncRemoteMethod__name = name + self._AsyncRemoteMethod__max_retries = max_retries + + def __getattr__(self, name): + return _AsyncRemoteMethod(self._AsyncRemoteMethod__proxy, "%s.%s" % (self._AsyncRemoteMethod__name, name), self._AsyncRemoteMethod__max_retries) + + def __call__(self, *args, **kwargs): + result = futures.FutureResult() + thread = threading.Thread(target=(self._AsyncRemoteMethod__asynccall), args=(result, args, kwargs)) + thread.setDaemon(True) + thread.start() + return result + + def __asynccall(self, asyncresult, args, kwargs): + for attempt in range(self._AsyncRemoteMethod__max_retries + 1): + try: + with self._AsyncRemoteMethod__proxy.__copy__() as proxy: + delay = 0.1 + random.random() / 5 + while not proxy._pyroConnection: + try: + proxy._pyroBind() + except errors.CommunicationError as x: + try: + if "no free workers" not in str(x): + raise + time.sleep(delay) + delay += 0.4 + random.random() / 2 + if 0 < config.COMMTIMEOUT / 2 < delay: + raise + finally: + x = None + del x + + value = 
proxy._pyroInvoke(self._AsyncRemoteMethod__name, args, kwargs) + asyncresult.value = value + return + except (errors.ConnectionClosedError, errors.TimeoutError) as x: + try: + if attempt >= self._AsyncRemoteMethod__max_retries: + asyncresult.value = futures._ExceptionWrapper(x) + return + finally: + x = None + del x + + except Exception as x: + try: + asyncresult.value = futures._ExceptionWrapper(x) + return + finally: + x = None + del x + + +def batch(proxy): + """convenience method to get a batch proxy adapter""" + return proxy._pyroBatch() + + +def asyncproxy(proxy, asynchronous=True): + """convenience method to set proxy to asynchronous or sync mode.""" + proxy._pyroAsync(asynchronous) + + +def pyroObjectToAutoProxy(obj): + """reduce function that automatically replaces Pyro objects by a Proxy""" + if config.AUTOPROXY: + daemon = getattr(obj, "_pyroDaemon", None) + if daemon: + return daemon.proxyFor(obj) + return obj + + +def callback(method): + """ + decorator to mark a method to be a 'callback'. This will make Pyro + raise any errors also on the callback side, and not only on the side + that does the callback call. + """ + method._pyroCallback = True + return method + + +def oneway(method): + """ + decorator to mark a method to be oneway (client won't wait for a response) + """ + method._pyroOneway = True + return method + + +def expose(method_or_class): + """ + Decorator to mark a method or class to be exposed for remote calls (relevant when REQUIRE_EXPOSE=True) + You can apply it to a method or a class as a whole. + If you need to change the default instance mode or instance creator, also use a @behavior decorator. 
+ """ + if inspect.isdatadescriptor(method_or_class): + func = method_or_class.fget or method_or_class.fset or method_or_class.fdel + if util.is_private_attribute(func.__name__): + raise AttributeError("exposing private names (starting with _) is not allowed") + func._pyroExposed = True + return method_or_class + attrname = getattr(method_or_class, "__name__", None) + if not attrname or isinstance(method_or_class, (classmethod, staticmethod)): + if inspect.ismethoddescriptor(method_or_class): + attrname = method_or_class.__get__(None, dict).__name__ + raise AttributeError("using @expose on a classmethod/staticmethod must be done after @classmethod/@staticmethod. Method: " + attrname) + else: + raise AttributeError("@expose cannot determine what this is: " + repr(method_or_class)) + if util.is_private_attribute(attrname): + raise AttributeError("exposing private names (starting with _) is not allowed") + if inspect.isclass(method_or_class): + clazz = method_or_class + log.debug("exposing all members of %r", clazz) + for name in clazz.__dict__: + if util.is_private_attribute(name): + continue + thing = getattr(clazz, name) + if not inspect.isfunction(thing): + if inspect.ismethoddescriptor(thing): + thing._pyroExposed = True + else: + if inspect.ismethod(thing): + thing.__func__._pyroExposed = True + if inspect.isdatadescriptor(thing): + if getattr(thing, "fset", None): + thing.fset._pyroExposed = True + if getattr(thing, "fget", None): + thing.fget._pyroExposed = True + if getattr(thing, "fdel", None): + thing.fdel._pyroExposed = True + + clazz._pyroExposed = True + return clazz + method_or_class._pyroExposed = True + return method_or_class + + +def behavior(instance_mode='session', instance_creator=None): + """ + Decorator to specify the server behavior of your Pyro class. 
+ """ + + def _behavior(clazz): + if not inspect.isclass(clazz): + raise TypeError("behavior decorator can only be used on a class") + elif instance_mode not in ('single', 'session', 'percall'): + raise ValueError("invalid instance mode: " + instance_mode) + if instance_creator and not callable(instance_creator): + raise TypeError("instance_creator must be a callable") + clazz._pyroInstancing = ( + instance_mode, instance_creator) + return clazz + + if not isinstance(instance_mode, basestring): + raise SyntaxError("behavior decorator is missing argument(s)") + return _behavior + + +@expose +class DaemonObject(object): + __doc__ = "The part of the daemon that is exposed as a Pyro object." + + def __init__(self, daemon): + self.daemon = daemon + + def registered(self): + """returns a list of all object names registered in this daemon""" + return list(self.daemon.objectsById.keys()) + + def ping(self): + """a simple do-nothing method for testing purposes""" + pass + + def info(self): + """return some descriptive information about the daemon""" + return "%s bound on %s, NAT %s, %d objects registered. Servertype: %s" % ( + constants.DAEMON_NAME, self.daemon.locationStr, self.daemon.natLocationStr, + len(self.daemon.objectsById), self.daemon.transportServer) + + def get_metadata(self, objectId, as_lists=False): + """ + Get metadata for the given object (exposed methods, oneways, attributes). + If you get an error in your proxy saying that 'DaemonObject' has no attribute 'get_metadata', + you're probably connecting to an older Pyro version (4.26 or earlier). + Either upgrade the Pyro version or set METADATA config item to False in your client code. 
+ """ + obj = self.daemon.objectsById.get(objectId) + if obj is not None: + metadata = util.get_exposed_members(obj, only_exposed=(config.REQUIRE_EXPOSE), as_lists=as_lists) + if config.REQUIRE_EXPOSE: + if not metadata["methods"]: + if not metadata["attrs"]: + if not inspect.isclass(obj): + obj = type(obj) + warnings.warn("Class %r doesn't expose any methods or attributes. Did you forget setting @expose on them?" % obj) + return metadata + log.debug("unknown object requested: %s", objectId) + raise errors.DaemonError("unknown object") + + def get_next_stream_item(self, streamId): + if streamId not in self.daemon.streaming_responses: + raise errors.PyroError("item stream terminated") + else: + client, timestamp, linger_timestamp, stream = self.daemon.streaming_responses[streamId] + if client is None: + self.daemon.streaming_responses[streamId] = (current_context.client, timestamp, 0, stream) + try: + return next(stream) + except Exception: + del self.daemon.streaming_responses[streamId] + raise + + def close_stream(self, streamId): + if streamId in self.daemon.streaming_responses: + del self.daemon.streaming_responses[streamId] + + +class Daemon(object): + __doc__ = "\n Pyro daemon. 
Contains server side logic and dispatches incoming remote method calls\n to the appropriate objects.\n " + + def __init__(self, host=None, port=0, unixsocket=None, nathost=None, natport=None, interface=DaemonObject, connected_socket=None): + if connected_socket: + nathost = natport = None + else: + if host is None: + host = config.HOST + else: + if nathost is None: + nathost = config.NATHOST + elif natport is None: + if nathost is not None: + natport = config.NATPORT + elif nathost and unixsocket: + raise ValueError("cannot use nathost together with unixsocket") + if (nathost is None) ^ (natport is None): + raise ValueError("must provide natport with nathost") + else: + self._Daemon__mustshutdown = threading.Event() + self._Daemon__mustshutdown.set() + self._Daemon__loopstopped = threading.Event() + self._Daemon__loopstopped.set() + if connected_socket: + from Pyro4.socketserver.existingconnectionserver import SocketServer_ExistingConnection + self.transportServer = SocketServer_ExistingConnection() + self.transportServer.init(self, connected_socket) + else: + if config.SERVERTYPE == "thread": + from Pyro4.socketserver.threadpoolserver import SocketServer_Threadpool + self.transportServer = SocketServer_Threadpool() + else: + if config.SERVERTYPE == "multiplex": + from Pyro4.socketserver.multiplexserver import SocketServer_Multiplex + self.transportServer = SocketServer_Multiplex() + else: + raise errors.PyroError("invalid server type '%s'" % config.SERVERTYPE) + self.transportServer.init(self, host, port, unixsocket) + self.locationStr = self.transportServer.locationStr + log.debug("daemon created on %s - %s (pid %d)", self.locationStr, socketutil.family_str(self.transportServer.sock), os.getpid()) + natport_for_loc = natport + if natport == 0: + natport_for_loc = int(self.locationStr.split(":")[1]) + self.natLocationStr = "%s:%d" % (nathost, natport_for_loc) if nathost else None + if self.natLocationStr: + log.debug("NAT address is %s", self.natLocationStr) + 
pyroObject = interface(self) + pyroObject._pyroId = constants.DAEMON_NAME + self.objectsById = {(pyroObject._pyroId): pyroObject} + self._Daemon__serializer_ids = {util.get_serializer(ser_name).serializer_id for ser_name in config.SERIALIZERS_ACCEPTED} + log.debug("accepted serializers: %s" % config.SERIALIZERS_ACCEPTED) + log.debug("pyro protocol version: %d pickle version: %d" % (constants.PROTOCOL_VERSION, config.PICKLE_PROTOCOL_VERSION)) + self._Daemon__pyroHmacKey = None + self._pyroInstances = {} + self.streaming_responses = {} + self.housekeeper_lock = threading.Lock() + self.create_single_instance_lock = threading.Lock() + self._Daemon__mustshutdown.clear() + + @property + def _pyroHmacKey(self): + return self._Daemon__pyroHmacKey + + @_pyroHmacKey.setter + def _pyroHmacKey(self, value): + if value: + if sys.version_info >= (3, 0): + if type(value) is not bytes: + value = value.encode("utf-8") + self._Daemon__pyroHmacKey = value + + @property + def sock(self): + """the server socket used by the daemon""" + return self.transportServer.sock + + @property + def sockets(self): + """list of all sockets used by the daemon (server socket and all active client sockets)""" + return self.transportServer.sockets + + @property + def selector(self): + """the multiplexing selector used, if using the multiplex server type""" + return self.transportServer.selector + + @staticmethod + def serveSimple(objects, host=None, port=0, daemon=None, ns=True, verbose=True): + """ + Basic method to fire up a daemon (or supply one yourself). + objects is a dict containing objects to register as keys, and + their names (or None) as values. If ns is true they will be registered + in the naming server as well, otherwise they just stay local. + If you need to publish on a unix domain socket you can't use this shortcut method. + See the documentation on 'publishing objects' (in chapter: Servers) for more details. 
+ """ + if daemon is None: + daemon = Daemon(host, port) + with daemon: + if ns: + ns = _locateNS() + for obj, name in objects.items(): + if ns: + localname = None + else: + localname = name + uri = daemon.register(obj, localname) + if verbose: + print("Object {0}:\n uri = {1}".format(repr(obj), uri)) + if name and ns: + ns.register(name, uri) + if verbose: + print(" name = {0}".format(name)) + + if verbose: + print("Pyro daemon running.") + daemon.requestLoop() + + def requestLoop(self, loopCondition=(lambda: True)): + """ + Goes in a loop to service incoming requests, until someone breaks this + or calls shutdown from another thread. + """ + self._Daemon__mustshutdown.clear() + log.info("daemon %s entering requestloop", self.locationStr) + try: + self._Daemon__loopstopped.clear() + condition = lambda: not self._Daemon__mustshutdown.isSet() and loopCondition() + self.transportServer.loop(loopCondition=condition) + finally: + self._Daemon__loopstopped.set() + + log.debug("daemon exits requestloop") + + def events(self, eventsockets): + """for use in an external event loop: handle any requests that are pending for this daemon""" + return self.transportServer.events(eventsockets) + + def shutdown(self): + """Cleanly terminate a daemon that is running in the requestloop.""" + log.debug("daemon shutting down") + self.streaming_responses = {} + time.sleep(0.02) + self._Daemon__mustshutdown.set() + if self.transportServer: + self.transportServer.shutdown() + time.sleep(0.02) + self.close() + self._Daemon__loopstopped.wait(timeout=5) + + @property + def _shutting_down(self): + return self._Daemon__mustshutdown.is_set() + + def _handshake(self, conn, denied_reason=None): + """ + Perform connection handshake with new clients. + Client sends a MSG_CONNECT message with a serialized data payload. + If all is well, return with a CONNECT_OK message. 
+ The reason we're not doing this with a MSG_INVOKE method call on the daemon + (like when retrieving the metadata) is because we need to force the clients + to get past an initial connect handshake before letting them invoke any method. + Return True for successful handshake, False if something was wrong. + If a denied_reason is given, the handshake will fail with the given reason. + """ + serializer_id = util.MarshalSerializer.serializer_id + msg_seq = 0 + try: + msg = message.Message.recv(conn, [message.MSG_CONNECT], hmac_key=(self._pyroHmacKey)) + msg_seq = msg.seq + if denied_reason: + raise Exception(denied_reason) + else: + if config.LOGWIRE: + _log_wiredata(log, "daemon handshake received", msg) + elif msg.serializer_id not in self._Daemon__serializer_ids: + raise errors.SerializeError("message used serializer that is not accepted: %d" % msg.serializer_id) + if "CORR" in msg.annotations: + current_context.correlation_id = uuid.UUID(bytes=(msg.annotations["CORR"])) + else: + current_context.correlation_id = uuid.uuid4() + serializer_id = msg.serializer_id + serializer = util.get_serializer_by_id(serializer_id) + data = serializer.deserializeData(msg.data, msg.flags & message.FLAGS_COMPRESSED) + handshake_response = self.validateHandshake(conn, data["handshake"]) + if msg.flags & message.FLAGS_META_ON_CONNECT: + flags = message.FLAGS_META_ON_CONNECT + handshake_response = {'handshake':handshake_response, + 'meta':self.objectsById[constants.DAEMON_NAME].get_metadata((data["object"]), as_lists=True)} + else: + flags = 0 + data, compressed = serializer.serializeData(handshake_response, config.COMPRESSION) + msgtype = message.MSG_CONNECTOK + if compressed: + flags |= message.FLAGS_COMPRESSED + except errors.ConnectionClosedError: + log.debug("handshake failed, connection closed early") + return False + except Exception as x: + try: + log.debug("handshake failed, reason:", exc_info=True) + serializer = util.get_serializer_by_id(serializer_id) + data, compressed = 
serializer.serializeData(str(x), False) + msgtype = message.MSG_CONNECTFAIL + flags = message.FLAGS_COMPRESSED if compressed else 0 + finally: + x = None + del x + + msg = message.Message(msgtype, data, serializer_id, flags, msg_seq, annotations=(self._Daemon__annotations()), hmac_key=(self._pyroHmacKey)) + if config.LOGWIRE: + _log_wiredata(log, "daemon handshake response", msg) + conn.send(msg.to_bytes()) + return msg.type == message.MSG_CONNECTOK + + def validateHandshake(self, conn, data): + """ + Override this to create a connection validator for new client connections. + It should return a response data object normally if the connection is okay, + or should raise an exception if the connection should be denied. + """ + return "hello" + + def clientDisconnect(self, conn): + """ + Override this to handle a client disconnect. + Conn is the SocketConnection object that was disconnected. + """ + pass + + def handleRequest(self, conn): + """ + Handle incoming Pyro request. Catches any exception that may occur and + wraps it in a reply to the calling side, as to not make this server side loop + terminate due to exceptions caused by remote invocations. 
+ """ + request_flags = 0 + request_seq = 0 + request_serializer_id = util.MarshalSerializer.serializer_id + wasBatched = False + isCallback = False + try: + msg = message.Message.recv(conn, [message.MSG_INVOKE, message.MSG_PING], hmac_key=(self._pyroHmacKey)) + except errors.CommunicationError as x: + try: + raise x + finally: + x = None + del x + + try: + request_flags = msg.flags + request_seq = msg.seq + request_serializer_id = msg.serializer_id + current_context.correlation_id = uuid.UUID(bytes=(msg.annotations["CORR"])) if "CORR" in msg.annotations else uuid.uuid4() + if config.LOGWIRE: + _log_wiredata(log, "daemon wiredata received", msg) + if msg.type == message.MSG_PING: + msg = message.Message((message.MSG_PING), b'pong', (msg.serializer_id), 0, (msg.seq), annotations=(self._Daemon__annotations()), + hmac_key=(self._pyroHmacKey)) + if config.LOGWIRE: + _log_wiredata(log, "daemon wiredata sending", msg) + else: + conn.send(msg.to_bytes()) + return + if msg.serializer_id not in self._Daemon__serializer_ids: + raise errors.SerializeError("message used serializer that is not accepted: %d" % msg.serializer_id) + serializer = util.get_serializer_by_id(msg.serializer_id) + if request_flags & message.FLAGS_KEEPSERIALIZED: + objId, method, vargs, kwargs = self._Daemon__deserializeBlobArgs(msg) + else: + objId, method, vargs, kwargs = serializer.deserializeCall((msg.data), compressed=(msg.flags & message.FLAGS_COMPRESSED)) + current_context.client = conn + try: + current_context.client_sock_addr = conn.sock.getpeername() + except socket.error: + current_context.client_sock_addr = None + + current_context.seq = msg.seq + current_context.annotations = msg.annotations + current_context.msg_flags = msg.flags + current_context.serializer_id = msg.serializer_id + del msg + obj = self.objectsById.get(objId) + if obj is not None: + if inspect.isclass(obj): + obj = self._getInstance(obj, conn) + if request_flags & message.FLAGS_BATCH: + data = [] + for method, vargs, kwargs 
in vargs: + method = util.getAttribute(obj, method) + try: + result = method(*vargs, **kwargs) + except Exception: + xt, xv = sys.exc_info()[0[:2]] + log.debug("Exception occurred while handling batched request: %s", xv) + xv._pyroTraceback = util.formatTraceback(detailed=(config.DETAILED_TRACEBACK)) + if sys.platform == "cli": + util.fixIronPythonExceptionForPickle(xv, True) + data.append(futures._ExceptionWrapper(xv)) + break + else: + data.append(result) + + wasBatched = True + elif method == "__getattr__": + data = util.get_exposed_property_value(obj, (vargs[0]), only_exposed=(config.REQUIRE_EXPOSE)) + else: + if method == "__setattr__": + data = util.set_exposed_property_value(obj, (vargs[0]), (vargs[1]), only_exposed=(config.REQUIRE_EXPOSE)) + else: + method = util.getAttribute(obj, method) + if request_flags & message.FLAGS_ONEWAY and config.ONEWAY_THREADED: + _OnewayCallThread(target=method, args=vargs, kwargs=kwargs).start() + else: + isCallback = getattr(method, "_pyroCallback", False) + data = method(*vargs, **kwargs) + if not request_flags & message.FLAGS_ONEWAY: + isStream, data = self._streamResponse(data, conn) + if isStream: + exc = errors.ProtocolError("result of call is an iterator") + ann = {"STRM": (data.encode())} if data else {} + self._sendExceptionResponse(conn, request_seq, (serializer.serializer_id), exc, None, annotations=ann, + flags=(message.FLAGS_ITEMSTREAMRESULT)) + return + else: + log.debug("unknown object requested: %s", objId) + raise errors.DaemonError("unknown object") + if request_flags & message.FLAGS_ONEWAY: + return + data, compressed = serializer.serializeData(data, compress=(config.COMPRESSION)) + response_flags = 0 + if compressed: + response_flags |= message.FLAGS_COMPRESSED + if wasBatched: + response_flags |= message.FLAGS_BATCH + msg = message.Message((message.MSG_RESULT), data, (serializer.serializer_id), response_flags, request_seq, annotations=(self._Daemon__annotations()), + hmac_key=(self._pyroHmacKey)) + 
current_context.response_annotations = {} + if config.LOGWIRE: + _log_wiredata(log, "daemon wiredata sending", msg) + conn.send(msg.to_bytes()) + except Exception: + xt, xv = sys.exc_info()[0[:2]] + msg = getattr(xv, "pyroMsg", None) + if msg: + request_seq = msg.seq + request_serializer_id = msg.serializer_id + if xt is not errors.ConnectionClosedError: + if xt not in (StopIteration, GeneratorExit): + log.debug("Exception occurred while handling request: %r", xv) + if not request_flags & message.FLAGS_ONEWAY: + tblines = isinstance(xv, errors.SerializeError) or isinstance(xv, errors.CommunicationError) or util.formatTraceback(detailed=(config.DETAILED_TRACEBACK)) + self._sendExceptionResponse(conn, request_seq, request_serializer_id, xv, tblines) + if isCallback or isinstance(xv, (errors.CommunicationError, errors.SecurityError)): + raise + + def _clientDisconnect(self, conn): + if config.ITER_STREAM_LINGER > 0: + for streamId in list(self.streaming_responses): + info = self.streaming_responses.get(streamId, None) + if info and info[0] is conn: + _, timestamp, _, stream = info + self.streaming_responses[streamId] = (None, timestamp, time.time(), stream) + + else: + for streamId in list(self.streaming_responses): + info = self.streaming_responses.get(streamId, None) + if info and info[0] is conn: + del self.streaming_responses[streamId] + + self.clientDisconnect(conn) + + def _housekeeping(self): + """ + Perform periodical housekeeping actions (cleanups etc) + """ + if self._shutting_down: + return + with self.housekeeper_lock: + if self.streaming_responses: + if config.ITER_STREAM_LIFETIME > 0: + for streamId in list(self.streaming_responses.keys()): + info = self.streaming_responses.get(streamId, None) + if info: + last_use_period = time.time() - info[1] + if 0 < config.ITER_STREAM_LIFETIME < last_use_period: + del self.streaming_responses[streamId] + + if config.ITER_STREAM_LINGER > 0: + for streamId in list(self.streaming_responses.keys()): + info = 
self.streaming_responses.get(streamId, None) + if info and info[2]: + linger_period = time.time() - info[2] + if linger_period > config.ITER_STREAM_LINGER: + del self.streaming_responses[streamId] + + self.housekeeping() + + def housekeeping(self): + """ + Override this to add custom periodic housekeeping (cleanup) logic. + This will be called every few seconds by the running daemon's request loop. + """ + pass + + def _getInstance(self, clazz, conn): + """ + Find or create a new instance of the class + """ + + def createInstance(clazz, creator): + try: + if creator: + obj = creator(clazz) + if isinstance(obj, clazz): + return obj + raise TypeError("instance creator returned object of different type") + return clazz() + except Exception: + log.exception("could not create pyro object instance") + raise + + instance_mode, instance_creator = clazz._pyroInstancing + if instance_mode == "single": + with self.create_single_instance_lock: + instance = self._pyroInstances.get(clazz) + if not instance: + log.debug("instancemode %s: creating new pyro object for %s", instance_mode, clazz) + instance = createInstance(clazz, instance_creator) + self._pyroInstances[clazz] = instance + return instance + else: + if instance_mode == "session": + instance = conn.pyroInstances.get(clazz) + if not instance: + log.debug("instancemode %s: creating new pyro object for %s", instance_mode, clazz) + instance = createInstance(clazz, instance_creator) + conn.pyroInstances[clazz] = instance + return instance + if instance_mode == "percall": + log.debug("instancemode %s: creating new pyro object for %s", instance_mode, clazz) + return createInstance(clazz, instance_creator) + raise errors.DaemonError("invalid instancemode in registered class") + + def _sendExceptionResponse(self, connection, seq, serializer_id, exc_value, tbinfo, flags=0, annotations=None): + """send an exception back including the local traceback info""" + exc_value._pyroTraceback = tbinfo + if sys.platform == "cli": + 
util.fixIronPythonExceptionForPickle(exc_value, True) + serializer = util.get_serializer_by_id(serializer_id) + try: + data, compressed = serializer.serializeData(exc_value) + except: + xt, xv, tb = sys.exc_info() + msg = "Error serializing exception: %s. Original exception: %s: %s" % (str(xv), type(exc_value), str(exc_value)) + exc_value = errors.PyroError(msg) + exc_value._pyroTraceback = tbinfo + if sys.platform == "cli": + util.fixIronPythonExceptionForPickle(exc_value, True) + data, compressed = serializer.serializeData(exc_value) + + flags |= message.FLAGS_EXCEPTION + if compressed: + flags |= message.FLAGS_COMPRESSED + annotations = dict(annotations or {}) + annotations.update(self.annotations()) + msg = message.Message((message.MSG_RESULT), data, (serializer.serializer_id), flags, seq, annotations=annotations, + hmac_key=(self._pyroHmacKey)) + if config.LOGWIRE: + _log_wiredata(log, "daemon wiredata sending (error response)", msg) + connection.send(msg.to_bytes()) + + def register(self, obj_or_class, objectId=None, force=False): + """ + Register a Pyro object under the given id. Note that this object is now only + known inside this daemon, it is not automatically available in a name server. + This method returns a URI for the registered object. + Pyro checks if an object is already registered, unless you set force=True. + You can register a class or an object (instance) directly. + For a class, Pyro will create instances of it to handle the remote calls according + to the instance_mode (set via @expose on the class). The default there is one object + per session (=proxy connection). If you register an object directly, Pyro will use + that single object for *all* remote calls. 
+ """ + if objectId and not isinstance(objectId, basestring): + raise TypeError("objectId must be a string or None") + else: + objectId = "obj_" + uuid.uuid4().hex + if inspect.isclass(obj_or_class): + if not hasattr(obj_or_class, "_pyroInstancing"): + obj_or_class._pyroInstancing = ('session', None) + if not force: + if hasattr(obj_or_class, "_pyroId"): + if obj_or_class._pyroId != "": + raise errors.DaemonError("object or class already has a Pyro id") + if objectId in self.objectsById: + raise errors.DaemonError("an object or class is already registered with that id") + obj_or_class._pyroId = objectId + obj_or_class._pyroDaemon = self + if config.AUTOPROXY: + for ser in util._serializers.values(): + if inspect.isclass(obj_or_class): + ser.register_type_replacement(obj_or_class, pyroObjectToAutoProxy) + else: + ser.register_type_replacement(type(obj_or_class), pyroObjectToAutoProxy) + + self.objectsById[obj_or_class._pyroId] = obj_or_class + return self.uriFor(objectId) + + def unregister(self, objectOrId): + """ + Remove a class or object from the known objects inside this daemon. + You can unregister the class/object directly, or with its id. + """ + if objectOrId is None: + raise ValueError("object or objectid argument expected") + else: + objectId = isinstance(objectOrId, basestring) or getattr(objectOrId, "_pyroId", None) + if objectId is None: + raise errors.DaemonError("object isn't registered") + else: + objectId = objectOrId + objectOrId = None + if objectId == constants.DAEMON_NAME: + return + if objectId in self.objectsById: + del self.objectsById[objectId] + if objectOrId is not None: + del objectOrId._pyroId + del objectOrId._pyroDaemon + + def uriFor(self, objectOrId, nat=True): + """ + Get a URI for the given object (or object id) from this daemon. + Only a daemon can hand out proper uris because the access location is + contained in them. 
+ Note that unregistered objects cannot be given an uri, but unregistered + object names can (it's just a string we're creating in that case). + If nat is set to False, the configured NAT address (if any) is ignored and it will + return an URI for the internal address. + """ + if not isinstance(objectOrId, basestring): + objectOrId = getattr(objectOrId, "_pyroId", None) + if objectOrId is None or objectOrId not in self.objectsById: + raise errors.DaemonError("object isn't registered in this daemon") + elif nat: + loc = self.natLocationStr or self.locationStr + else: + loc = self.locationStr + return URI("PYRO:%s@%s" % (objectOrId, loc)) + + def resetMetadataCache(self, objectOrId, nat=True): + """Reset cache of metadata when a Daemon has available methods/attributes + dynamically updated. Clients will have to get a new proxy to see changes""" + uri = self.uriFor(objectOrId, nat) + if uri.object in self.objectsById: + registered_object = self.objectsById[uri.object] + util.reset_exposed_members(registered_object, (config.REQUIRE_EXPOSE), as_lists=True) + util.reset_exposed_members(registered_object, (config.REQUIRE_EXPOSE), as_lists=False) + + def proxyFor(self, objectOrId, nat=True): + """ + Get a fully initialized Pyro Proxy for the given object (or object id) for this daemon. + If nat is False, the configured NAT address (if any) is ignored. + The object or id must be registered in this daemon, or you'll get an exception. 
+ (you can't get a proxy for an unknown object) + """ + uri = self.uriFor(objectOrId, nat) + proxy = Proxy(uri) + try: + registered_object = self.objectsById[uri.object] + except KeyError: + raise errors.DaemonError("object isn't registered in this daemon") + + meta = util.get_exposed_members(registered_object, only_exposed=(config.REQUIRE_EXPOSE)) + proxy._pyroGetMetadata(known_metadata=meta) + return proxy + + def close(self): + """Close down the server and release resources""" + self._Daemon__mustshutdown.set() + self.streaming_responses = {} + if self.transportServer: + log.debug("daemon closing") + self.transportServer.close() + self.transportServer = None + + def annotations(self): + """Override to return a dict with custom user annotations to be sent with each response message.""" + return {} + + def combine(self, daemon): + """ + Combines the event loop of the other daemon in the current daemon's loop. + You can then simply run the current daemon's requestLoop to serve both daemons. + This works fine on the multiplex server type, but doesn't work with the threaded server type. 
+ """ + log.debug("combining event loop with other daemon") + self.transportServer.combine_loop(daemon.transportServer) + + def __annotations(self): + annotations = current_context.response_annotations + if current_context.correlation_id: + annotations["CORR"] = current_context.correlation_id.bytes + else: + annotations.pop("CORR", None) + annotations.update(self.annotations()) + return annotations + + def __repr__(self): + if hasattr(self, "locationStr"): + family = socketutil.family_str(self.sock) + return "<%s.%s at 0x%x; %s - %s; %d objects>" % (self.__class__.__module__, self.__class__.__name__, + id(self), self.locationStr, family, len(self.objectsById)) + return "<%s.%s at 0x%x; unusable>" % (self.__class__.__module__, self.__class__.__name__, id(self)) + + def __enter__(self): + if not self.transportServer: + raise errors.PyroError("cannot reuse this object") + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + + def __getstate__(self): + return {} + + def __getstate_for_dict__(self): + return tuple(self.__getstate__()) + + def __setstate_from_dict__(self, state): + pass + + if sys.version_info < (3, 0): + _Daemon__lazy_dict_iterator_types = ( + type({}.iterkeys()), type({}.itervalues()), type({}.iteritems())) + else: + _Daemon__lazy_dict_iterator_types = ( + type({}.keys()), type({}.values()), type({}.items())) + + def _streamResponse(self, data, client): + if sys.version_info < (3, 4): + from collections import Iterator + else: + from collections.abc import Iterator + if isinstance(data, Iterator) or inspect.isgenerator(data): + if config.ITER_STREAMING: + if type(data) in self._Daemon__lazy_dict_iterator_types: + raise errors.PyroError("won't serialize or stream lazy dict iterators, convert to list yourself") + stream_id = str(uuid.uuid4()) + self.streaming_responses[stream_id] = (client, time.time(), 0, data) + return (True, stream_id) + return (True, None) + return ( + False, data) + + def __deserializeBlobArgs(self, 
protocolmsg): + import marshal + blobinfo = protocolmsg.annotations["BLBI"] + if sys.platform == "cli": + if type(blobinfo) is not str: + blobinfo = str(blobinfo) + blobinfo, objId, method = marshal.loads(blobinfo) + blob = SerializedBlob(blobinfo, protocolmsg, is_blob=True) + return (objId, method, (blob,), {}) + + +try: + import serpent + + def pyro_class_serpent_serializer(obj, serializer, stream, level): + d = util.SerializerBase.class_to_dict(obj) + serializer.ser_builtins_dict(d, stream, level) + + + serpent.register_class(URI, pyro_class_serpent_serializer) + serpent.register_class(Proxy, pyro_class_serpent_serializer) + serpent.register_class(Daemon, pyro_class_serpent_serializer) + serpent.register_class(futures._ExceptionWrapper, pyro_class_serpent_serializer) +except ImportError: + pass + +def serialize_core_object_to_dict(obj): + return {'__class__':"Pyro4.core." + (obj.__class__.__name__), + 'state':(obj.__getstate_for_dict__)()} + + +util.SerializerBase.register_class_to_dict(URI, serialize_core_object_to_dict, serpent_too=False) +util.SerializerBase.register_class_to_dict(Proxy, serialize_core_object_to_dict, serpent_too=False) +util.SerializerBase.register_class_to_dict(Daemon, serialize_core_object_to_dict, serpent_too=False) +util.SerializerBase.register_class_to_dict((futures._ExceptionWrapper), (futures._ExceptionWrapper.__serialized_dict__), serpent_too=False) + +def _log_wiredata(logger, text, msg): + """logs all the given properties of the wire message in the given logger""" + corr = str(uuid.UUID(bytes=(msg.annotations["CORR"]))) if "CORR" in msg.annotations else "?" 
+ logger.debug("%s: msgtype=%d flags=0x%x ser=%d seq=%d corr=%s\nannotations=%r\ndata=%r" % ( + text, msg.type, msg.flags, msg.serializer_id, msg.seq, corr, msg.annotations, msg.data)) + + +class _CallContext(threading.local): + + def __init__(self): + self.client = None + self.client_sock_addr = None + self.seq = 0 + self.msg_flags = 0 + self.serializer_id = 0 + self.annotations = {} + self.response_annotations = {} + self.correlation_id = None + + def to_global(self): + if sys.platform != "cli": + return dict(self.__dict__) + return {'client':self.client, + 'seq':self.seq, + 'msg_flags':self.msg_flags, + 'serializer_id':self.serializer_id, + 'annotations':self.annotations, + 'response_annotations':self.response_annotations, + 'correlation_id':self.correlation_id, + 'client_sock_addr':self.client_sock_addr} + + def from_global(self, values): + self.client = values["client"] + self.seq = values["seq"] + self.msg_flags = values["msg_flags"] + self.serializer_id = values["serializer_id"] + self.annotations = values["annotations"] + self.response_annotations = values["response_annotations"] + self.correlation_id = values["correlation_id"] + self.client_sock_addr = values["client_sock_addr"] + + def track_resource(self, resource): + """keep a weak reference to the resource to be tracked for this connection""" + if self.client: + self.client.tracked_resources.add(resource) + else: + raise errors.PyroError("cannot track resource on a connectionless call") + + def untrack_resource(self, resource): + """no longer track the resource for this connection""" + if self.client: + self.client.tracked_resources.discard(resource) + else: + raise errors.PyroError("cannot untrack resource on a connectionless call") + + +class _OnewayCallThread(threading.Thread): + + def __init__(self, target, args, kwargs): + super(_OnewayCallThread, self).__init__(target=target, args=args, kwargs=kwargs, name="oneway-call") + self.daemon = True + self.parent_context = current_context.to_global() + + 
def run(self): + current_context.from_global(self.parent_context) + super(_OnewayCallThread, self).run() + + +def _resolve(uri, hmac_key=None): + """ + Resolve a 'magic' uri (PYRONAME, PYROMETA) into the direct PYRO uri. + It finds a name server, and use that to resolve a PYRONAME uri into the direct PYRO uri pointing to the named object. + If uri is already a PYRO uri, it is returned unmodified. + You can consider this a shortcut function so that you don't have to locate and use a name server proxy yourself. + Note: if you need to resolve more than a few names, consider using the name server directly instead of repeatedly + calling this function, to avoid the name server lookup overhead from each call. + """ + if isinstance(uri, basestring): + uri = URI(uri) + else: + if not isinstance(uri, URI): + raise TypeError("can only resolve Pyro URIs") + elif uri.protocol == "PYRO": + return uri + log.debug("resolving %s", uri) + if uri.protocol == "PYRONAME": + with _locateNS((uri.host), (uri.port), hmac_key=hmac_key) as nameserver: + return nameserver.lookup(uri.object) + elif uri.protocol == "PYROMETA": + with _locateNS((uri.host), (uri.port), hmac_key=hmac_key) as nameserver: + candidates = nameserver.list(metadata_all=(uri.object)) + if candidates: + candidate = random.choice(list(candidates.values())) + log.debug("resolved to candidate %s", candidate) + return URI(candidate) + raise errors.NamingError("no registrations available with desired metadata properties %s" % uri.object) + else: + raise errors.PyroError("invalid uri protocol") + + +def _locateNS(host=None, port=None, broadcast=True, hmac_key=None): + """Get a proxy for a name server somewhere in the network.""" + if host is None: + if config.NS_HOST in ('localhost', '::1') or config.NS_HOST.startswith("127."): + if ":" in config.NS_HOST: + hosts = [ + "[%s]" % config.NS_HOST] + else: + try: + socket.gethostbyaddr("127.0.1.1") + hosts = [config.NS_HOST] if config.NS_HOST == "127.0.1.1" else [config.NS_HOST, 
"127.0.1.1"] + except socket.error: + hosts = [ + config.NS_HOST] + + for host in hosts: + uristring = "PYRO:%s@%s:%d" % (constants.NAMESERVER_NAME, host, port or config.NS_PORT) + log.debug("locating the NS: %s", uristring) + proxy = Proxy(uristring) + proxy._pyroHmacKey = hmac_key + try: + proxy._pyroBind() + log.debug("located NS") + return proxy + except errors.PyroError: + pass + + elif config.PREFER_IP_VERSION == 6: + broadcast = False + if broadcast: + if not port: + port = config.NS_BCPORT + log.debug("broadcast locate") + sock = socketutil.createBroadcastSocket(reuseaddr=(config.SOCK_REUSE), timeout=0.7) + for _ in range(3): + try: + for bcaddr in config.parseAddressesString(config.BROADCAST_ADDRS): + try: + sock.sendto(b'GET_NSURI', 0, (bcaddr, port)) + except socket.error as x: + try: + err = getattr(x, "errno", x.args[0]) + if err not in socketutil.ERRNO_EADDRNOTAVAIL: + if err not in socketutil.ERRNO_EADDRINUSE: + raise + finally: + x = None + del x + + data, _ = sock.recvfrom(100) + sock.close() + if sys.version_info >= (3, 0): + data = data.decode("iso-8859-1") + log.debug("located NS: %s", data) + proxy = Proxy(data) + proxy._pyroHmacKey = hmac_key + return proxy + except socket.timeout: + continue + + try: + sock.shutdown(socket.SHUT_RDWR) + except (OSError, socket.error): + pass + + sock.close() + log.debug("broadcast locate failed, try direct connection on NS_HOST") + else: + log.debug("skipping broadcast lookup") + host = config.NS_HOST + port = config.NS_PORT + if not port: + port = config.NS_PORT + if URI.isUnixsockLocation(host): + uristring = "PYRO:%s@%s" % (constants.NAMESERVER_NAME, host) + else: + if ":" in host: + host = "[%s]" % host + uristring = "PYRO:%s@%s:%d" % (constants.NAMESERVER_NAME, host, port) + uri = URI(uristring) + log.debug("locating the NS: %s", uri) + proxy = Proxy(uri) + proxy._pyroHmacKey = hmac_key + try: + proxy._pyroBind() + log.debug("located NS") + return proxy + except errors.PyroError as x: + try: + e = 
errors.NamingError("Failed to locate the nameserver") + if sys.version_info >= (3, 0): + e.__cause__ = x + raise e + finally: + x = None + del x + + +class SerializedBlob(object): + __doc__ = "\n Used to wrap some data to make Pyro pass this object transparently (it keeps the serialized payload as-is)\n Only when you need to access the actual client data you can deserialize on demand.\n This makes efficient, transparent gateways or dispatchers and such possible:\n they don't have to de/reserialize the message and are independent from the serialized class definitions.\n You have to pass this as the only parameter to a remote method call for Pyro to understand it.\n Init arguments:\n ``info`` = some (small) descriptive data about the blob. Can be a simple id or name or guid. Must be marshallable.\n ``data`` = the actual client data payload that you want to transfer in the blob. Can be anything that you would\n otherwise have used as regular remote call arguments.\n " + + def __init__(self, info, data, is_blob=False): + self.info = info + self._data = data + self._contains_blob = is_blob + + def deserialized(self): + """Retrieves the client data stored in this blob. 
Deserializes the data automatically if required.""" + if self._contains_blob: + protocol_msg = self._data + serializer = util.get_serializer_by_id(protocol_msg.serializer_id) + _, _, data, _ = serializer.deserializeData(protocol_msg.data, protocol_msg.flags & message.FLAGS_COMPRESSED) + return data + return self._data + + +current_context = _CallContext() +if sys.version_info < (3, 7): + + def asyncproxy(proxy, asynchronous=True, **kwargs): + """convenience method to set proxy to asynchronous or sync mode.""" + if kwargs: + kword = list(kwargs.keys()) + if kword != ["async"]: + raise TypeError("asyncproxy() got an unexpected keyword argument '{:s}'".format(kword[0])) + asynchronous = kwargs["async"] + proxy._pyroAsync(asynchronous) + + + current_module = sys.modules[__name__] + pyro4_module = __import__("Pyro4") + current_module.__dict__["async"] = pyro4_module.__dict__["async"] = asyncproxy diff --git a/APPS_UNCOMPILED/lib/Pyro4/errors.py b/APPS_UNCOMPILED/lib/Pyro4/errors.py new file mode 100644 index 0000000..ab0856c --- /dev/null +++ b/APPS_UNCOMPILED/lib/Pyro4/errors.py @@ -0,0 +1,51 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/Pyro4/errors.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 1392 bytes +""" +Definition of the various exceptions that are used in Pyro. + +Pyro - Python Remote Objects. Copyright by Irmen de Jong (irmen@razorvine.net). +""" + +class PyroError(Exception): + __doc__ = "Generic base of all Pyro-specific errors." + + +class CommunicationError(PyroError): + __doc__ = "Base class for the errors related to network communication problems." + + +class ConnectionClosedError(CommunicationError): + __doc__ = "The connection was unexpectedly closed." 
+ + +class TimeoutError(CommunicationError): + __doc__ = "\n A call could not be completed within the set timeout period,\n or the network caused a timeout.\n " + + +class ProtocolError(CommunicationError): + __doc__ = "Pyro received a message that didn't match the active Pyro network protocol, or there was a protocol related error." + + +class MessageTooLargeError(ProtocolError): + __doc__ = "Pyro received a message or was trying to send a message that exceeds the maximum message size as configured." + + +class NamingError(PyroError): + __doc__ = "There was a problem related to the name server or object names." + + +class DaemonError(PyroError): + __doc__ = "The Daemon encountered a problem." + + +class SecurityError(PyroError): + __doc__ = "A security related error occurred." + + +class SerializeError(ProtocolError): + __doc__ = "Something went wrong while (de)serializing data." diff --git a/APPS_UNCOMPILED/lib/Pyro4/futures.py b/APPS_UNCOMPILED/lib/Pyro4/futures.py new file mode 100644 index 0000000..7b86b44 --- /dev/null +++ b/APPS_UNCOMPILED/lib/Pyro4/futures.py @@ -0,0 +1,212 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/Pyro4/futures.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 8749 bytes +""" +Support for Futures (asynchronously executed callables). +If you're using Python 3.2 or newer, also see +http://docs.python.org/3/library/concurrent.futures.html#future-objects + +Pyro - Python Remote Objects. Copyright by Irmen de Jong (irmen@razorvine.net). 
+""" +import sys, functools, logging, threading, time +__all__ = [ + "Future", "FutureResult", "_ExceptionWrapper"] +log = logging.getLogger("Pyro4.futures") + +class Future(object): + __doc__ = "\n Holds a callable that will be executed asynchronously and provide its\n result value some time in the future.\n This is a more general implementation than the AsyncRemoteMethod, which\n only works with Pyro proxies (and provides a bit different syntax).\n This class has a few extra features as well (delay, canceling).\n " + + def __init__(self, somecallable): + self.callable = somecallable + self.chain = [] + self.exceptionhandler = None + self.call_delay = 0 + self.cancelled = False + self.completed = False + + def __call__(self, *args, **kwargs): + """ + Start the future call with the provided arguments. + Control flow returns immediately, with a FutureResult object. + """ + if not (self.completed or hasattr(self, "chain")): + raise RuntimeError("the future has already been evaluated") + if self.cancelled: + raise RuntimeError("the future has been cancelled") + chain = self.chain + del self.chain + result = FutureResult() + thread = threading.Thread(target=(self._Future__asynccall), args=(result, chain, args, kwargs)) + thread.setDaemon(True) + thread.start() + return result + + def __asynccall(self, asyncresult, chain, args, kwargs): + while self.call_delay > 0: + delay = self.cancelled or min(self.call_delay, 2) + time.sleep(delay) + self.call_delay -= delay + + if self.cancelled: + self.completed = True + asyncresult.set_cancelled() + return + try: + self.completed = True + self.cancelled = False + value = (self.callable)(*args, **kwargs) + for call, args, kwargs in chain: + call = functools.partial(call, value) + value = call(*args, **kwargs) + + asyncresult.value = value + except Exception as x: + try: + if self.exceptionhandler: + self.exceptionhandler(x) + asyncresult.value = _ExceptionWrapper(x) + finally: + x = None + del x + + def delay(self, seconds): + """ 
+ Delay the evaluation of the future for the given number of seconds. + Return True if successful otherwise False if the future has already been evaluated. + """ + if self.completed: + return False + self.call_delay = seconds + return True + + def cancel(self): + """ + Cancels the execution of the future altogether. + If the execution hasn't been started yet, the cancellation is successful and returns True. + Otherwise, it failed and returns False. + """ + if self.completed: + return False + self.cancelled = True + return True + + def then(self, call, *args, **kwargs): + """ + Add a callable to the call chain, to be invoked when the results become available. + The result of the current call will be used as the first argument for the next call. + Optional extra arguments can be provided in args and kwargs. + Returns self so you can easily chain then() calls. + """ + self.chain.append((call, args, kwargs)) + return self + + def iferror(self, exceptionhandler): + """ + Specify the exception handler to be invoked (with the exception object as only + argument) when calculating the result raises an exception. + If no exception handler is set, any exception raised in the asynchronous call will be silently ignored. + Returns self so you can easily chain other calls. + """ + self.exceptionhandler = exceptionhandler + return self + + +class FutureResult(object): + __doc__ = "\n The result object for asynchronous Pyro calls.\n Unfortunatley it should be similar to the more general Future class but\n it is still somewhat limited (no delay, no canceling).\n " + + def __init__(self): + self._FutureResult__ready = threading.Event() + self.callchain = [] + self.valueLock = threading.Lock() + self.exceptionhandler = None + + def wait(self, timeout=None): + """ + Wait for the result to become available, with optional timeout (in seconds). + Returns True if the result is ready, or False if it still isn't ready. 
+ """ + result = self._FutureResult__ready.wait(timeout) + if result is None: + return self._FutureResult__ready.isSet() + return result + + @property + def ready(self): + """Boolean that contains the readiness of the asynchronous result""" + return self._FutureResult__ready.isSet() + + def get_value(self): + self._FutureResult__ready.wait() + if isinstance(self._FutureResult__value, _ExceptionWrapper): + self._FutureResult__value.raiseIt() + else: + return self._FutureResult__value + + def set_value(self, value): + with self.valueLock: + self._FutureResult__value = value + if isinstance(value, _ExceptionWrapper): + if self.exceptionhandler: + self.exceptionhandler(value.exception) + else: + for call, args, kwargs in self.callchain: + call = functools.partial(call, self._FutureResult__value) + self._FutureResult__value = call(*args, **kwargs) + if isinstance(self._FutureResult__value, _ExceptionWrapper): + break + + self.callchain = [] + self._FutureResult__ready.set() + + value = property(get_value, set_value, None, "The result value of the call. Reading it will block if not available yet.") + + def set_cancelled(self): + self.set_value(_ExceptionWrapper(RuntimeError("future has been cancelled"))) + + def then(self, call, *args, **kwargs): + """ + Add a callable to the call chain, to be invoked when the results become available. + The result of the current call will be used as the first argument for the next call. + Optional extra arguments can be provided in args and kwargs. + Returns self so you can easily chain then() calls. + """ + with self.valueLock: + if self._FutureResult__ready.isSet(): + call = functools.partial(call, self._FutureResult__value) + self._FutureResult__value = call(*args, **kwargs) + else: + self.callchain.append((call, args, kwargs)) + return self + + def iferror(self, exceptionhandler): + """ + Specify the exception handler to be invoked (with the exception object as only + argument) when asking for the result raises an exception. 
+ If no exception handler is set, any exception result will be silently ignored (unless + you explicitly ask for the value). Returns self so you can easily chain other calls. + """ + self.exceptionhandler = exceptionhandler + return self + + +class _ExceptionWrapper(object): + __doc__ = "Class that wraps a remote exception. If this is returned, Pyro will\n re-throw the exception on the receiving side. Usually this is taken care of\n by a special response message flag, but in the case of batched calls this\n flag is useless and another mechanism was needed." + + def __init__(self, exception): + self.exception = exception + + def raiseIt(self): + from Pyro4.util import fixIronPythonExceptionForPickle + if sys.platform == "cli": + fixIronPythonExceptionForPickle(self.exception, False) + raise self.exception + + def __serialized_dict__(self): + """serialized form as a dictionary""" + from Pyro4.util import SerializerBase + return {'__class__':"Pyro4.futures._ExceptionWrapper", + 'exception':(SerializerBase.class_to_dict)(self.exception)} diff --git a/APPS_UNCOMPILED/lib/Pyro4/message.py b/APPS_UNCOMPILED/lib/Pyro4/message.py new file mode 100644 index 0000000..8576afb --- /dev/null +++ b/APPS_UNCOMPILED/lib/Pyro4/message.py @@ -0,0 +1,199 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/Pyro4/message.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 11082 bytes +""" +The pyro wire protocol message. + +Pyro - Python Remote Objects. Copyright by Irmen de Jong (irmen@razorvine.net). 
+""" +import hashlib, hmac, struct, logging, sys, zlib +from Pyro4 import errors, constants +from Pyro4.configuration import config +__all__ = [ + "Message", "secure_compare"] +log = logging.getLogger("Pyro4.message") +MSG_CONNECT = 1 +MSG_CONNECTOK = 2 +MSG_CONNECTFAIL = 3 +MSG_INVOKE = 4 +MSG_RESULT = 5 +MSG_PING = 6 +FLAGS_EXCEPTION = 1 +FLAGS_COMPRESSED = 2 +FLAGS_ONEWAY = 4 +FLAGS_BATCH = 8 +FLAGS_META_ON_CONNECT = 16 +FLAGS_ITEMSTREAMRESULT = 32 +FLAGS_KEEPSERIALIZED = 64 + +class Message(object): + __doc__ = "\n Pyro write protocol message.\n\n Wire messages contains of a fixed size header, an optional set of annotation chunks,\n and then the payload data. This class doesn't deal with the payload data:\n (de)serialization and handling of that data is done elsewhere.\n Annotation chunks are only parsed, except the 'HMAC' chunk: that is created\n and validated because it is used as a message digest.\n\n The header format is::\n\n 4 id ('PYRO')\n 2 protocol version\n 2 message type\n 2 message flags\n 2 sequence number\n 4 data length (i.e. 2 Gb data size limitation)\n 2 data serialization format (serializer id)\n 2 annotations length (total of all chunks, 0 if no annotation chunks present)\n 2 (reserved)\n 2 checksum\n\n After the header, zero or more annotation chunks may follow, of the format::\n\n 4 id (ASCII)\n 2 chunk length\n x annotation chunk databytes\n\n After that, the actual payload data bytes follow.\n\n The sequencenumber is used to check if response messages correspond to the\n actual request message. 
This prevents the situation where Pyro would perhaps return\n the response data from another remote call (which would not result in an error otherwise!)\n This could happen for instance if the socket data stream gets out of sync, perhaps due To\n some form of signal that interrupts I/O.\n\n The header checksum is a simple sum of the header fields to make reasonably sure\n that we are dealing with an actual correct PYRO protocol header and not some random\n data that happens to start with the 'PYRO' protocol identifier.\n\n Pyro now uses two annotation chunks that you should not touch yourself:\n 'HMAC' contains the hmac digest of the message data bytes and\n all of the annotation chunk data bytes (except those of the HMAC chunk itself).\n 'CORR' contains the correlation id (guid bytes)\n Other chunk names are free to use for custom purposes, but Pyro has the right\n to reserve more of them for internal use in the future.\n " + __slots__ = ['type', 'flags', 'seq', 'data', 'data_size', 'serializer_id', 'annotations', + 'annotations_size', 'hmac_key'] + header_format = "!4sHHHHiHHHH" + header_size = struct.calcsize(header_format) + checksum_magic = 13545 + + def __init__(self, msgType, databytes, serializer_id, flags, seq, annotations=None, hmac_key=None): + self.type = msgType + self.flags = flags + self.seq = seq + self.data = databytes + self.data_size = len(self.data) + self.serializer_id = serializer_id + self.annotations = dict(annotations or {}) + self.hmac_key = hmac_key + if self.hmac_key: + self.annotations["HMAC"] = self.hmac() + self.annotations_size = sum([6 + len(v) for v in self.annotations.values()]) + if 0 < config.MAX_MESSAGE_SIZE < self.data_size + self.annotations_size: + raise errors.MessageTooLargeError("max message size exceeded (%d where max=%d)" % ( + self.data_size + self.annotations_size, config.MAX_MESSAGE_SIZE)) + + def __repr__(self): + return "<%s.%s at %x; type=%d flags=%d seq=%d datasize=%d #ann=%d>" % ( + self.__module__, 
self.__class__.__name__, id(self), self.type, self.flags, self.seq, self.data_size, len(self.annotations)) + + def to_bytes(self): + """creates a byte stream containing the header followed by annotations (if any) followed by the data""" + return self._Message__header_bytes() + self._Message__annotations_bytes() + self.data + + def __header_bytes(self): + if not 0 <= self.data_size <= 2147483647: + raise ValueError("invalid message size (outside range 0..2Gb)") + checksum = self.type + constants.PROTOCOL_VERSION + self.data_size + self.annotations_size + self.serializer_id + self.flags + self.seq + self.checksum_magic & 65535 + return struct.pack(self.header_format, b'PYRO', constants.PROTOCOL_VERSION, self.type, self.flags, self.seq, self.data_size, self.serializer_id, self.annotations_size, 0, checksum) + + def __annotations_bytes(self): + if self.annotations: + a = [] + for k, v in self.annotations.items(): + if len(k) != 4: + raise errors.ProtocolError("annotation key must be of length 4") + if sys.version_info >= (3, 0): + k = k.encode("ASCII") + a.append(struct.pack("!4sH", k, len(v))) + a.append(v) + + return (b'').join(a) + return b'' + + def send(self, connection): + """send the message as bytes over the connection""" + connection.send(self._Message__header_bytes()) + if self.annotations: + connection.send(self._Message__annotations_bytes()) + connection.send(self.data) + + @classmethod + def from_header(cls, headerData): + """Parses a message header. 
Does not yet process the annotations chunks and message data.""" + if not headerData or len(headerData) != cls.header_size: + raise errors.ProtocolError("header data size mismatch") + tag, ver, msg_type, flags, seq, data_size, serializer_id, anns_size, _, checksum = struct.unpack(cls.header_format, headerData) + if tag != b'PYRO' or ver != constants.PROTOCOL_VERSION: + raise errors.ProtocolError("invalid data or unsupported protocol version") + if checksum != msg_type + ver + data_size + anns_size + flags + serializer_id + seq + cls.checksum_magic & 65535: + raise errors.ProtocolError("header checksum mismatch") + msg = Message(msg_type, b'', serializer_id, flags, seq) + msg.data_size = data_size + msg.annotations_size = anns_size + return msg + + @classmethod + def recv(cls, connection, requiredMsgTypes=None, hmac_key=None): + """ + Receives a pyro message from a given connection. + Accepts the given message types (None=any, or pass a sequence). + Also reads annotation chunks and the actual payload data. + Validates a HMAC chunk if present. 
+ """ + msg = cls.from_header(connection.recv(cls.header_size)) + msg.hmac_key = hmac_key + if 0 < config.MAX_MESSAGE_SIZE < msg.data_size + msg.annotations_size: + errorMsg = "max message size exceeded (%d where max=%d)" % (msg.data_size + msg.annotations_size, config.MAX_MESSAGE_SIZE) + log.error("connection " + str(connection) + ": " + errorMsg) + connection.close() + exc = errors.MessageTooLargeError(errorMsg) + exc.pyroMsg = msg + raise exc + if requiredMsgTypes: + if msg.type not in requiredMsgTypes: + err = "invalid msg type %d received" % msg.type + log.error(err) + exc = errors.ProtocolError(err) + exc.pyroMsg = msg + raise exc + if msg.annotations_size: + annotations_data = connection.recv(msg.annotations_size) + msg.annotations = {} + i = 0 + while i < msg.annotations_size: + anno, length = struct.unpack("!4sH", annotations_data[i[:i + 6]]) + if sys.version_info >= (3, 0): + anno = anno.decode("ASCII") + msg.annotations[anno] = annotations_data[(i + 6)[:i + 6 + length]] + if sys.platform == "cli": + msg.annotations[anno] = bytes(msg.annotations[anno]) + i += 6 + length + + msg.data = connection.recv(msg.data_size) + if "HMAC" in msg.annotations and hmac_key: + exc = secure_compare(msg.annotations["HMAC"], msg.hmac()) or errors.SecurityError("message hmac mismatch") + exc.pyroMsg = msg + raise exc + else: + if ("HMAC" in msg.annotations) != bool(hmac_key): + err = "hmac key config not symmetric" + log.warning(err) + exc = errors.SecurityError(err) + exc.pyroMsg = msg + raise exc + return msg + + def hmac(self): + """returns the hmac of the data and the annotation chunk values (except HMAC chunk itself)""" + mac = hmac.new((self.hmac_key), (self.data), digestmod=(hashlib.sha1)) + for k, v in sorted(self.annotations.items()): + if k != "HMAC": + mac.update(v) + + if sys.platform != "cli": + return mac.digest() + return bytes(mac.digest()) + + @staticmethod + def ping(pyroConnection, hmac_key=None): + """Convenience method to send a 'ping' message and wait 
for the 'pong' response""" + ping = Message(MSG_PING, b'ping', 42, 0, 0, hmac_key=hmac_key) + pyroConnection.send(ping.to_bytes()) + Message.recv(pyroConnection, [MSG_PING]) + + def decompress_if_needed(self): + """Decompress the message data if it is compressed.""" + if self.flags & FLAGS_COMPRESSED: + self.data = zlib.decompress(self.data) + self.flags &= ~FLAGS_COMPRESSED + self.data_size = len(self.data) + return self + + +try: + from hmac import compare_digest as secure_compare +except ImportError: + import operator + try: + reduce + except NameError: + from functools import reduce + + def secure_compare(a, b): + if type(a) != type(b): + raise TypeError("arguments must both be same type") + if len(a) != len(b): + return False + return reduce(operator.and_, map(operator.eq, a, b), True) diff --git a/APPS_UNCOMPILED/lib/Pyro4/naming.py b/APPS_UNCOMPILED/lib/Pyro4/naming.py new file mode 100644 index 0000000..35f1c43 --- /dev/null +++ b/APPS_UNCOMPILED/lib/Pyro4/naming.py @@ -0,0 +1,7 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/Pyro4/naming.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 25442 bytes diff --git a/APPS_UNCOMPILED/lib/Pyro4/naming_storage.py b/APPS_UNCOMPILED/lib/Pyro4/naming_storage.py new file mode 100644 index 0000000..ec3ede9 --- /dev/null +++ b/APPS_UNCOMPILED/lib/Pyro4/naming_storage.py @@ -0,0 +1,496 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/Pyro4/naming_storage.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 18133 bytes +""" +Name Server persistent storage implementations. + +Pyro - Python Remote Objects. 
Copyright by Irmen de Jong (irmen@razorvine.net). +""" +import re, logging, sys, threading +if sys.version_info <= (3, 4): + from collections import MutableMapping +else: + from collections.abc import MutableMapping +from contextlib import closing +from Pyro4.errors import NamingError +try: + import anydbm as dbm +except ImportError: + try: + import dbm + except ImportError: + dbm = None + +except Exception as x: + try: + dbm = None + finally: + x = None + del x + +try: + import sqlite3 +except ImportError: + sqlite3 = None + +log = logging.getLogger("Pyro4.naming_storage") + +class SqlStorage(MutableMapping): + __doc__ = "\n Sqlite-based storage.\n It is just a single (name,uri) table for the names and another table for the metadata.\n Sqlite db connection objects aren't thread-safe, so a new connection is created in every method.\n " + + def __init__(self, dbfile): + if dbfile == ":memory:": + raise ValueError("We don't support the sqlite :memory: database type. Just use the default volatile in-memory store.") + self.dbfile = dbfile + with closing(sqlite3.connect(dbfile)) as db: + db.execute("PRAGMA foreign_keys=ON") + try: + db.execute("SELECT COUNT(*) FROM pyro_names").fetchone() + except sqlite3.OperationalError: + self._create_schema(db) + else: + try: + db.execute("SELECT COUNT(*) FROM pyro_metadata").fetchone() + except sqlite3.OperationalError: + db.execute("ALTER TABLE pyro_names RENAME TO pyro_names_old") + self._create_schema(db) + db.execute("INSERT INTO pyro_names(name, uri) SELECT name, uri FROM pyro_names_old") + db.execute("DROP TABLE pyro_names_old") + + db.commit() + + def _create_schema(self, db): + db.execute("CREATE TABLE pyro_names\n (\n id integer PRIMARY KEY,\n name nvarchar NOT NULL UNIQUE,\n uri nvarchar NOT NULL\n );") + db.execute("CREATE TABLE pyro_metadata\n (\n object integer NOT NULL,\n metadata nvarchar NOT NULL,\n FOREIGN KEY(object) REFERENCES pyro_names(id)\n );") + + def __getattr__(self, item): + raise 
NotImplementedError("SqlStorage doesn't implement method/attribute '" + item + "'") + + def __getitem__(self, item): + try: + with closing(sqlite3.connect(self.dbfile)) as db: + result = db.execute("SELECT id, uri FROM pyro_names WHERE name=?", (item,)).fetchone() + if result: + dbid, uri = result + metadata = {m[0] for m in db.execute("SELECT metadata FROM pyro_metadata WHERE object=?", (dbid,)).fetchall()} + return (uri, metadata) + raise KeyError(item) + except sqlite3.DatabaseError as e: + try: + raise NamingError("sqlite error in getitem: " + str(e)) + finally: + e = None + del e + + def __setitem__(self, key, value): + uri, metadata = value + try: + with closing(sqlite3.connect(self.dbfile)) as db: + cursor = db.cursor() + cursor.execute("PRAGMA foreign_keys=ON") + dbid = cursor.execute("SELECT id FROM pyro_names WHERE name=?", (key,)).fetchone() + if dbid: + dbid = dbid[0] + cursor.execute("DELETE FROM pyro_metadata WHERE object=?", (dbid,)) + cursor.execute("DELETE FROM pyro_names WHERE id=?", (dbid,)) + cursor.execute("INSERT INTO pyro_names(name, uri) VALUES(?,?)", (key, uri)) + if metadata: + object_id = cursor.lastrowid + for m in metadata: + cursor.execute("INSERT INTO pyro_metadata(object, metadata) VALUES (?,?)", (object_id, m)) + + cursor.close() + db.commit() + except sqlite3.DatabaseError as e: + try: + raise NamingError("sqlite error in setitem: " + str(e)) + finally: + e = None + del e + + def __len__(self): + try: + with closing(sqlite3.connect(self.dbfile)) as db: + return db.execute("SELECT count(*) FROM pyro_names").fetchone()[0] + except sqlite3.DatabaseError as e: + try: + raise NamingError("sqlite error in len: " + str(e)) + finally: + e = None + del e + + def __contains__(self, item): + try: + with closing(sqlite3.connect(self.dbfile)) as db: + return db.execute("SELECT EXISTS(SELECT 1 FROM pyro_names WHERE name=? 
LIMIT 1)", (item,)).fetchone()[0] + except sqlite3.DatabaseError as e: + try: + raise NamingError("sqlite error in contains: " + str(e)) + finally: + e = None + del e + + def __delitem__(self, key): + try: + with closing(sqlite3.connect(self.dbfile)) as db: + db.execute("PRAGMA foreign_keys=ON") + dbid = db.execute("SELECT id FROM pyro_names WHERE name=?", (key,)).fetchone() + if dbid: + dbid = dbid[0] + db.execute("DELETE FROM pyro_metadata WHERE object=?", (dbid,)) + db.execute("DELETE FROM pyro_names WHERE id=?", (dbid,)) + db.commit() + except sqlite3.DatabaseError as e: + try: + raise NamingError("sqlite error in delitem: " + str(e)) + finally: + e = None + del e + + def __iter__(self): + try: + with closing(sqlite3.connect(self.dbfile)) as db: + result = db.execute("SELECT name FROM pyro_names") + return iter([n[0] for n in result.fetchall()]) + except sqlite3.DatabaseError as e: + try: + raise NamingError("sqlite error in iter: " + str(e)) + finally: + e = None + del e + + def clear(self): + try: + with closing(sqlite3.connect(self.dbfile)) as db: + db.execute("PRAGMA foreign_keys=ON") + db.execute("DELETE FROM pyro_metadata") + db.execute("DELETE FROM pyro_names") + db.commit() + with closing(sqlite3.connect((self.dbfile), isolation_level=None)) as db: + db.execute("VACUUM") + except sqlite3.DatabaseError as e: + try: + raise NamingError("sqlite error in clear: " + str(e)) + finally: + e = None + del e + + def optimized_prefix_list(self, prefix, return_metadata=False): + try: + with closing(sqlite3.connect(self.dbfile)) as db: + names = {} + if return_metadata: + for dbid, name, uri in db.execute("SELECT id, name, uri FROM pyro_names WHERE name LIKE ?", (prefix + "%",)).fetchall(): + metadata = {m[0] for m in db.execute("SELECT metadata FROM pyro_metadata WHERE object=?", (dbid,)).fetchall()} + names[name] = (uri, metadata) + + else: + for name, uri in db.execute("SELECT name, uri FROM pyro_names WHERE name LIKE ?", (prefix + "%",)).fetchall(): + 
names[name] = uri + + return names + except sqlite3.DatabaseError as e: + try: + raise NamingError("sqlite error in optimized_prefix_list: " + str(e)) + finally: + e = None + del e + + def optimized_regex_list(self, regex, return_metadata=False): + pass + + def optimized_metadata_search(self, metadata_all=None, metadata_any=None, return_metadata=False): + try: + with closing(sqlite3.connect(self.dbfile)) as db: + if metadata_any: + params = list(metadata_any) + sql = "SELECT id, name, uri FROM pyro_names WHERE id IN (SELECT object FROM pyro_metadata WHERE metadata IN ({seq}))".format(seq=(",".join(["?"] * len(metadata_any)))) + else: + params = list(metadata_all) + params.append(len(metadata_all)) + sql = "SELECT id, name, uri FROM pyro_names WHERE id IN (SELECT object FROM pyro_metadata WHERE metadata IN ({seq}) GROUP BY object HAVING COUNT(metadata)=?)".format(seq=(",".join(["?"] * len(metadata_all)))) + result = db.execute(sql, params).fetchall() + if return_metadata: + names = {} + for dbid, name, uri in result: + metadata = {m[0] for m in db.execute("SELECT metadata FROM pyro_metadata WHERE object=?", (dbid,)).fetchall()} + names[name] = (uri, metadata) + + else: + names = {name: uri for dbid, name, uri in result} + return names + except sqlite3.DatabaseError as e: + try: + raise NamingError("sqlite error in optimized_metadata_search: " + str(e)) + finally: + e = None + del e + + def remove_items(self, items): + try: + with closing(sqlite3.connect(self.dbfile)) as db: + db.execute("PRAGMA foreign_keys=ON") + for item in items: + dbid = db.execute("SELECT id FROM pyro_names WHERE name=?", (item,)).fetchone() + if dbid: + dbid = dbid[0] + db.execute("DELETE FROM pyro_metadata WHERE object=?", (dbid,)) + db.execute("DELETE FROM pyro_names WHERE id=?", (dbid,)) + + db.commit() + except sqlite3.DatabaseError as e: + try: + raise NamingError("sqlite error in remove_items: " + str(e)) + finally: + e = None + del e + + def everything(self, return_metadata=False): + 
try: + with closing(sqlite3.connect(self.dbfile)) as db: + names = {} + if return_metadata: + for dbid, name, uri in db.execute("SELECT id, name, uri FROM pyro_names").fetchall(): + metadata = {m[0] for m in db.execute("SELECT metadata FROM pyro_metadata WHERE object=?", (dbid,)).fetchall()} + names[name] = (uri, metadata) + + else: + for name, uri in db.execute("SELECT name, uri FROM pyro_names").fetchall(): + names[name] = uri + + return names + except sqlite3.DatabaseError as e: + try: + raise NamingError("sqlite error in everything: " + str(e)) + finally: + e = None + del e + + def close(self): + pass + + +class DbmStorage(MutableMapping): + __doc__ = "\n Storage implementation that uses a persistent dbm file.\n Because dbm only supports strings as key/value, we encode/decode them in utf-8.\n Dbm files cannot be accessed concurrently, so a strict concurrency model\n is used where only one operation is processed at the same time\n (this is very slow when compared to the in-memory storage)\n DbmStorage does NOT support storing metadata! 
It only accepts empty metadata,\n and always returns empty metadata.\n " + + def __init__(self, dbmfile): + self.dbmfile = dbmfile + db = dbm.open((self.dbmfile), "c", mode=384) + db.close() + self.lock = threading.Lock() + + def __getattr__(self, item): + raise NotImplementedError("DbmStorage doesn't implement method/attribute '" + item + "'") + + def __getitem__(self, item): + item = item.encode("utf-8") + with self.lock: + try: + with closing(dbm.open(self.dbmfile)) as db: + return ( + db[item].decode("utf-8"), frozenset()) + except dbm.error as e: + try: + raise NamingError("dbm error in getitem: " + str(e)) + finally: + e = None + del e + + def __setitem__(self, key, value): + uri, metadata = value + if metadata: + log.warning("DbmStorage doesn't support metadata, silently discarded") + key = key.encode("utf-8") + uri = uri.encode("utf-8") + with self.lock: + try: + with closing(dbm.open(self.dbmfile, "w")) as db: + db[key] = uri + except dbm.error as e: + try: + raise NamingError("dbm error in setitem: " + str(e)) + finally: + e = None + del e + + def __len__(self): + with self.lock: + try: + with closing(dbm.open(self.dbmfile)) as db: + return len(db) + except dbm.error as e: + try: + raise NamingError("dbm error in len: " + str(e)) + finally: + e = None + del e + + def __contains__(self, item): + item = item.encode("utf-8") + with self.lock: + try: + with closing(dbm.open(self.dbmfile)) as db: + return item in db + except dbm.error as e: + try: + raise NamingError("dbm error in contains: " + str(e)) + finally: + e = None + del e + + def __delitem__(self, key): + key = key.encode("utf-8") + with self.lock: + try: + with closing(dbm.open(self.dbmfile, "w")) as db: + del db[key] + except dbm.error as e: + try: + raise NamingError("dbm error in delitem: " + str(e)) + finally: + e = None + del e + + def __iter__(self): + with self.lock: + try: + with closing(dbm.open(self.dbmfile)) as db: + return iter([key.decode("utf-8") for key in db.keys()]) + except 
dbm.error as e: + try: + raise NamingError("dbm error in iter: " + str(e)) + finally: + e = None + del e + + def clear(self): + with self.lock: + try: + with closing(dbm.open(self.dbmfile, "w")) as db: + if hasattr(db, "clear"): + db.clear() + else: + for key in db.keys(): + del db[key] + + except dbm.error as e: + try: + raise NamingError("dbm error in clear: " + str(e)) + finally: + e = None + del e + + def optimized_prefix_list(self, prefix, return_metadata=False): + with self.lock: + try: + with closing(dbm.open(self.dbmfile)) as db: + result = {} + if hasattr(db, "items"): + for key, value in db.items(): + key = key.decode("utf-8") + if key.startswith(prefix): + uri = value.decode("utf-8") + result[key] = (uri, frozenset()) if return_metadata else uri + + else: + for key in db.keys(): + keystr = key.decode("utf-8") + if keystr.startswith(prefix): + uri = db[key].decode("utf-8") + result[keystr] = (uri, frozenset()) if return_metadata else uri + + return result + except dbm.error as e: + try: + raise NamingError("dbm error in optimized_prefix_list: " + str(e)) + finally: + e = None + del e + + def optimized_regex_list(self, regex, return_metadata=False): + try: + regex = re.compile(regex + "$") + except re.error as x: + try: + raise NamingError("invalid regex: " + str(x)) + finally: + x = None + del x + + with self.lock: + try: + with closing(dbm.open(self.dbmfile)) as db: + result = {} + if hasattr(db, "items"): + for key, value in db.items(): + key = key.decode("utf-8") + if regex.match(key): + uri = value.decode("utf-8") + result[key] = (uri, frozenset()) if return_metadata else uri + + else: + for key in db.keys(): + keystr = key.decode("utf-8") + if regex.match(keystr): + uri = db[key].decode("utf-8") + result[keystr] = (uri, frozenset()) if return_metadata else uri + + return result + except dbm.error as e: + try: + raise NamingError("dbm error in optimized_regex_list: " + str(e)) + finally: + e = None + del e + + def optimized_metadata_search(self, 
metadata_all=None, metadata_any=None, return_metadata=False): + if metadata_all or metadata_any: + raise NamingError("DbmStorage doesn't support metadata") + return self.everything(return_metadata) + + def remove_items(self, items): + with self.lock: + try: + with closing(dbm.open(self.dbmfile, "w")) as db: + for item in items: + try: + del db[item.encode("utf-8")] + except KeyError: + pass + + except dbm.error as e: + try: + raise NamingError("dbm error in remove_items: " + str(e)) + finally: + e = None + del e + + def everything(self, return_metadata=False): + with self.lock: + try: + with closing(dbm.open(self.dbmfile)) as db: + result = {} + if hasattr(db, "items"): + for key, value in db.items(): + uri = value.decode("utf-8") + result[key.decode("utf-8")] = (uri, frozenset()) if return_metadata else uri + + else: + for key in db.keys(): + uri = db[key].decode("utf-8") + result[key.decode("utf-8")] = (uri, frozenset()) if return_metadata else uri + + return result + except dbm.error as e: + try: + raise NamingError("dbm error in everything: " + str(e)) + finally: + e = None + del e + + def close(self): + pass diff --git a/APPS_UNCOMPILED/lib/Pyro4/nsc.py b/APPS_UNCOMPILED/lib/Pyro4/nsc.py new file mode 100644 index 0000000..ed5f326 --- /dev/null +++ b/APPS_UNCOMPILED/lib/Pyro4/nsc.py @@ -0,0 +1,163 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/Pyro4/nsc.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 5712 bytes +""" +Name server control tool. + +Pyro - Python Remote Objects. Copyright by Irmen de Jong (irmen@razorvine.net). 
+""" +from __future__ import print_function +import sys, os, warnings +from Pyro4 import errors, naming +if sys.version_info < (3, 0): + input = raw_input + +def handleCommand(nameserver, options, args): + + def printListResult(resultdict, title=''): + print("--------START LIST %s" % title) + for name, (uri, metadata) in sorted(resultdict.items()): + print("%s --> %s" % (name, uri)) + if metadata: + print(" metadata:", metadata) + + print("--------END LIST %s" % title) + + def cmd_ping(): + nameserver.ping() + print("Name server ping ok.") + + def cmd_listprefix(): + if len(args) == 1: + printListResult(nameserver.list(return_metadata=True)) + else: + printListResult(nameserver.list(prefix=(args[1]), return_metadata=True), "- prefix '%s'" % args[1]) + + def cmd_listregex(): + if len(args) != 2: + raise SystemExit("requires one argument: pattern") + printListResult(nameserver.list(regex=(args[1]), return_metadata=True), "- regex '%s'" % args[1]) + + def cmd_lookup(): + if len(args) != 2: + raise SystemExit("requires one argument: name") + uri, metadata = nameserver.lookup((args[1]), return_metadata=True) + print(uri) + if metadata: + print("metadata:", metadata) + + def cmd_register(): + if len(args) != 3: + raise SystemExit("requires two arguments: name uri") + nameserver.register((args[1]), (args[2]), safe=True) + print("Registered %s" % args[1]) + + def cmd_remove(): + if len(args) != 2: + raise SystemExit("requires one argument: name") + else: + count = nameserver.remove(args[1]) + if count > 0: + print("Removed %s" % args[1]) + else: + print("Nothing removed") + + def cmd_removeregex(): + if len(args) != 2: + raise SystemExit("requires one argument: pattern") + sure = input("Potentially removing lots of items from the Name server. Are you sure (y/n)?").strip() + if sure in ('y', 'Y'): + count = nameserver.remove(regex=(args[1])) + print("%d items removed." 
% count) + + def cmd_setmeta(): + if len(args) < 2: + raise SystemExit("requires at least 2 arguments: uri and zero or more meta tags") + else: + metadata = set(args[2[:None]]) + nameserver.set_metadata(args[1], metadata) + if metadata: + print("Metadata updated") + else: + print("Metadata cleared") + + def cmd_listmeta_all(): + if len(args) < 2: + raise SystemExit("requires at least one metadata tag argument") + metadata = set(args[1[:None]]) + printListResult(nameserver.list(metadata_all=metadata, return_metadata=True), " - searched by metadata") + + def cmd_listmeta_any(): + if len(args) < 2: + raise SystemExit("requires at least one metadata tag argument") + metadata = set(args[1[:None]]) + printListResult(nameserver.list(metadata_any=metadata, return_metadata=True), " - searched by metadata") + + commands = { + 'ping': cmd_ping, + 'list': cmd_listprefix, + 'listmatching': cmd_listregex, + 'listmeta_all': cmd_listmeta_all, + 'listmeta_any': cmd_listmeta_any, + 'lookup': cmd_lookup, + 'register': cmd_register, + 'remove': cmd_remove, + 'removematching': cmd_removeregex, + 'setmeta': cmd_setmeta} + try: + commands[args[0]]() + except Exception as x: + try: + print("Error: %s - %s" % (type(x).__name__, x)) + finally: + x = None + del x + + +def main(args=None): + from optparse import OptionParser + usage = "usage: %prog [options] command [arguments]\nCommands: register remove removematching lookup list listmatching\n listmeta_all listmeta_any setmeta ping" + parser = OptionParser(usage=usage) + parser.add_option("-n", "--host", dest="host", help="hostname of the NS") + parser.add_option("-p", "--port", dest="port", type="int", help="port of the NS (or bc-port if host isn't specified)") + parser.add_option("-u", "--unixsocket", help="Unix domain socket name of the NS") + parser.add_option("-k", "--key", help="the HMAC key to use (deprecated)") + parser.add_option("-v", "--verbose", action="store_true", dest="verbose", help="verbose output") + options, args = 
parser.parse_args(args) + if options.key: + warnings.warn("using -k to supply HMAC key on the command line is a security problem and is deprecated since Pyro 4.72. See the documentation for an alternative.") + if "PYRO_HMAC_KEY" in os.environ: + if options.key: + raise SystemExit("error: don't use -k and PYRO_HMAC_KEY at the same time") + options.key = os.environ["PYRO_HMAC_KEY"] + if not args or args[0] not in ('register', 'remove', 'removematching', 'list', + 'listmatching', 'lookup', 'listmeta_all', 'listmeta_any', + 'setmeta', 'ping'): + parser.error("invalid or missing command") + if options.verbose: + print("Locating name server...") + if options.unixsocket: + options.host = "./u:" + options.unixsocket + try: + nameserver = naming.locateNS((options.host), (options.port), hmac_key=(options.key)) + except errors.PyroError as x: + try: + print("Error: %s" % x) + return + finally: + x = None + del x + + if options.verbose: + print("Name server found: %s" % nameserver._pyroUri) + handleCommand(nameserver, options, args) + if options.verbose: + print("Done.") + + +if __name__ == "__main__": + main() diff --git a/APPS_UNCOMPILED/lib/Pyro4/socketserver/__init__.py b/APPS_UNCOMPILED/lib/Pyro4/socketserver/__init__.py new file mode 100644 index 0000000..18e7a76 --- /dev/null +++ b/APPS_UNCOMPILED/lib/Pyro4/socketserver/__init__.py @@ -0,0 +1,13 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/Pyro4/socketserver/__init__.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 128 bytes +""" +Package for the various server types. + +Pyro - Python Remote Objects. Copyright by Irmen de Jong (irmen@razorvine.net). 
+""" +pass diff --git a/APPS_UNCOMPILED/lib/Pyro4/socketserver/existingconnectionserver.py b/APPS_UNCOMPILED/lib/Pyro4/socketserver/existingconnectionserver.py new file mode 100644 index 0000000..7aa75b8 --- /dev/null +++ b/APPS_UNCOMPILED/lib/Pyro4/socketserver/existingconnectionserver.py @@ -0,0 +1,121 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/Pyro4/socketserver/existingconnectionserver.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 4127 bytes +""" +Socket server for a the special case of a single, already existing, connection. + +Pyro - Python Remote Objects. Copyright by Irmen de Jong (irmen@razorvine.net). +""" +from __future__ import print_function +import socket, sys, logging, ssl +from Pyro4 import socketutil, errors, util +from Pyro4.configuration import config +log = logging.getLogger("Pyro4.existingconnectionserver") + +class SocketServer_ExistingConnection(object): + + def __init__(self): + self.sock = self.daemon = self.locationStr = self.conn = None + self.shutting_down = False + + def init(self, daemon, connected_socket): + connected_socket.getpeername() + if config.SSL: + if not isinstance(connected_socket, ssl.SSLSocket): + raise socket.error("SSL configured for Pyro but existing socket is not a SSL socket") + else: + self.daemon = daemon + self.sock = connected_socket + log.info("starting server on user-supplied connected socket " + str(connected_socket)) + sn = connected_socket.getsockname() + if hasattr(socket, "AF_UNIX") and connected_socket.family == socket.AF_UNIX: + self.locationStr = "./u:" + (sn or "<>") + else: + host, port = sn[None[:2]] + if ":" in host: + self.locationStr = "[%s]:%d" % (host, port) + else: + self.locationStr = "%s:%d" % (host, port) + self.conn = socketutil.SocketConnection(connected_socket) + + def 
__repr__(self): + return "<%s on %s>" % (self.__class__.__name__, self.locationStr) + + def __del__(self): + if self.sock is not None: + self.sock = None + self.conn = None + + @property + def selector(self): + raise TypeError("single-connection server doesn't have multiplexing selector") + + @property + def sockets(self): + return [self.sock] + + def combine_loop(self, server): + raise errors.PyroError("cannot combine servers when using user-supplied connected socket") + + def events(self, eventsockets): + raise errors.PyroError("cannot combine events when using user-supplied connected socket") + + def shutdown(self): + self.shutting_down = True + self.close() + self.sock = None + self.conn = None + + def close(self): + self.sock = None + self.conn = None + + def handleRequest(self): + """Handles a single connection request event and returns if the connection is still active""" + try: + self.daemon.handleRequest(self.conn) + return True + except (socket.error, errors.ConnectionClosedError, errors.SecurityError) as x: + try: + try: + peername = self.conn.sock.getpeername() + log.debug("disconnected %s", peername) + except socket.error: + log.debug("disconnected a client") + + self.shutdown() + return False + finally: + x = None + del x + + except errors.TimeoutError as x: + try: + log.warning("error during handleRequest: %s" % x) + return False + finally: + x = None + del x + + except: + ex_t, ex_v, ex_tb = sys.exc_info() + tb = util.formatTraceback(ex_t, ex_v, ex_tb) + msg = "error during handleRequest: %s; %s" % (ex_v, "".join(tb)) + log.warning(msg) + return False + + def loop(self, loopCondition=(lambda: True)): + log.debug("entering requestloop") + while loopCondition() and self.sock: + try: + self.handleRequest() + self.daemon._housekeeping() + except socket.timeout: + pass + except KeyboardInterrupt: + log.debug("stopping on break signal") + break diff --git a/APPS_UNCOMPILED/lib/Pyro4/socketserver/multiplexserver.py 
b/APPS_UNCOMPILED/lib/Pyro4/socketserver/multiplexserver.py new file mode 100644 index 0000000..e763b9b --- /dev/null +++ b/APPS_UNCOMPILED/lib/Pyro4/socketserver/multiplexserver.py @@ -0,0 +1,221 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/Pyro4/socketserver/multiplexserver.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 9876 bytes +""" +Socket server based on socket multiplexing. Doesn't use threads. +Uses the best available selector (kqueue, poll, select). + +Pyro - Python Remote Objects. Copyright by Irmen de Jong (irmen@razorvine.net). +""" +from __future__ import print_function +import socket, time, sys, logging, os +from collections import defaultdict +from Pyro4 import socketutil, errors, util +from Pyro4.configuration import config +if sys.version_info >= (3, 5): + import selectors +else: + try: + import selectors2 as selectors + except ImportError: + if sys.version_info >= (3, 4): + import selectors + else: + try: + import selectors34 as selectors + except ImportError: + selectors = None + + log = logging.getLogger("Pyro4.multiplexserver") + + class SocketServer_Multiplex(object): + __doc__ = "Multiplexed transport server for socket connections (uses select, poll, kqueue, ...)" + + def __init__(self): + self.sock = self.daemon = self.locationStr = None + if selectors is None: + raise RuntimeError("This Python installation doesn't have the 'selectors2' or 'selectors34' module installed, which is required to use Pyro's multiplex server. 
Install it, or use the threadpool server instead.") + self.selector = selectors.DefaultSelector() + self.shutting_down = False + + def init(self, daemon, host, port, unixsocket=None): + log.info("starting multiplexed socketserver") + log.debug("selector implementation: %s.%s", self.selector.__class__.__module__, self.selector.__class__.__name__) + self.sock = None + bind_location = unixsocket if unixsocket else (host, port) + if config.SSL: + sslContext = socketutil.getSSLcontext(servercert=(config.SSL_SERVERCERT), serverkey=(config.SSL_SERVERKEY), + keypassword=(config.SSL_SERVERKEYPASSWD), + cacerts=(config.SSL_CACERTS)) + log.info("using SSL, cert=%s key=%s cacerts=%s", config.SSL_SERVERCERT, config.SSL_SERVERKEY, config.SSL_CACERTS) + else: + sslContext = None + log.info("not using SSL") + self.sock = socketutil.createSocket(bind=bind_location, reuseaddr=(config.SOCK_REUSE), + timeout=(config.COMMTIMEOUT), + noinherit=True, + nodelay=(config.SOCK_NODELAY), + sslContext=sslContext) + self.daemon = daemon + self._socketaddr = sockaddr = self.sock.getsockname() + if not unixsocket: + if sockaddr[0].startswith("127."): + if (host is None or host.lower()) != "localhost" and not host.startswith("127."): + log.warning("weird DNS setup: %s resolves to localhost (127.x.x.x)", host) + elif unixsocket: + self.locationStr = "./u:" + unixsocket + else: + host = host or sockaddr[0] + port = port or sockaddr[1] + if ":" in host: + self.locationStr = "[%s]:%d" % (host, port) + else: + self.locationStr = "%s:%d" % (host, port) + self.selector.register(self.sock, selectors.EVENT_READ, self) + + def __repr__(self): + return "<%s on %s; %d connections>" % (self.__class__.__name__, self.locationStr, len(self.selector.get_map()) - 1) + + def __del__(self): + if self.sock is not None: + self.selector.close() + self.sock.close() + self.sock = None + + def eventsParse error at or near `COME_FROM' instruction at offset 170_0 + + def _handleConnection(self, sock): + try: + if sock is 
None: + return + else: + csock, caddr = sock.accept() + if hasattr(csock, "getpeercert"): + log.debug("connected %s - SSL", caddr) + else: + log.debug("connected %s - unencrypted", caddr) + if config.COMMTIMEOUT: + csock.settimeout(config.COMMTIMEOUT) + except (socket.error, OSError) as x: + try: + err = getattr(x, "errno", x.args[0]) + if err in socketutil.ERRNO_BADF or err in socketutil.ERRNO_ENOTSOCK: + raise errors.ConnectionClosedError("server socket closed") + err = getattr(x, "errno", x.args[0]) + log.warning("accept() failed '%s' with errno=%d, shouldn't happen", x, err) + return + finally: + x = None + del x + + try: + conn = socketutil.SocketConnection(csock) + if self.daemon._handshake(conn): + return conn + conn.close() + except: + ex_t, ex_v, ex_tb = sys.exc_info() + tb = util.formatTraceback(ex_t, ex_v, ex_tb) + log.warning("error during connect/handshake: %s; %s", ex_v, "\n".join(tb)) + try: + csock.shutdown(socket.SHUT_RDWR) + except (OSError, socket.error): + pass + + csock.close() + + def shutdown(self): + self.shutting_down = True + self.wakeup() + time.sleep(0.05) + self.close() + self.sock = None + + def close(self): + self.selector.close() + if self.sock: + sockname = None + try: + sockname = self.sock.getsockname() + except (socket.error, OSError): + pass + + self.sock.close() + if type(sockname) is str: + if os.path.exists(sockname): + os.remove(sockname) + self.sock = None + + @property + def sockets(self): + registrations = self.selector.get_map() + if registrations: + return [sk.fileobj for sk in registrations.values()] + return [] + + def wakeup(self): + """bit of a hack to trigger a blocking server to get out of the loop, useful at clean shutdowns""" + socketutil.interruptSocket(self._socketaddr) + + def handleRequest(self, conn): + """Handles a single connection request event and returns if the connection is still active""" + try: + self.daemon.handleRequest(conn) + return True + except (socket.error, errors.ConnectionClosedError, 
errors.SecurityError): + try: + peername = conn.sock.getpeername() + log.debug("disconnected %s", peername) + except socket.error: + log.debug("disconnected a client") + + return False + except errors.TimeoutError as x: + try: + log.warning("error during handleRequest: %s" % x) + return False + finally: + x = None + del x + + except: + ex_t, ex_v, ex_tb = sys.exc_info() + tb = util.formatTraceback(ex_t, ex_v, ex_tb) + msg = "error during handleRequest: %s; %s" % (ex_v, "".join(tb)) + log.warning(msg) + return False + + def loop(self, loopCondition=(lambda: True)): + log.debug("entering multiplexed requestloop") + while loopCondition(): + try: + try: + events = self.selector.select(config.POLLTIMEOUT) + except OSError: + events = [] + + events_per_server = defaultdict(list) + for key, mask in events: + if mask & selectors.EVENT_READ: + events_per_server[key.data].append(key.fileobj) + + for server, fileobjs in events_per_server.items(): + server.events(fileobjs) + + if not events_per_server: + self.daemon._housekeeping() + except socket.timeout: + pass + except KeyboardInterrupt: + log.debug("stopping on break signal") + break + + def combine_loop(self, server): + for sock in server.sockets: + self.selector.register(sock, selectors.EVENT_READ, server) + + server.selector = self.selector \ No newline at end of file diff --git a/APPS_UNCOMPILED/lib/Pyro4/socketserver/threadpool.py b/APPS_UNCOMPILED/lib/Pyro4/socketserver/threadpool.py new file mode 100644 index 0000000..65c707e --- /dev/null +++ b/APPS_UNCOMPILED/lib/Pyro4/socketserver/threadpool.py @@ -0,0 +1,142 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/Pyro4/socketserver/threadpool.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 4327 bytes +""" +Thread pool job processor with variable number of 
worker threads (between max/min amount). + +Pyro - Python Remote Objects. Copyright by Irmen de Jong (irmen@razorvine.net). +""" +from __future__ import with_statement +import time, logging, threading +from Pyro4.configuration import config +log = logging.getLogger("Pyro4.threadpool") + +class PoolError(Exception): + pass + + +class NoFreeWorkersError(PoolError): + pass + + +class Worker(threading.Thread): + + def __init__(self, pool): + super(Worker, self).__init__() + self.daemon = True + self.name = "Pyro-Worker-%d" % id(self) + self.job_available = threading.Event() + self.job = None + self.pool = pool + + def process(self, job): + self.job = job + self.job_available.set() + + def run(self): + while True: + self.job_available.wait() + self.job_available.clear() + if self.job is None: + break + try: + self.job() + except Exception as x: + try: + log.exception("unhandled exception from job in worker thread %s: %s", self.name, x) + finally: + x = None + del x + + self.job = None + self.pool.notify_done(self) + + self.pool = None + + +class Pool(object): + __doc__ = "\n A job processing pool that is using a pool of worker threads.\n The amount of worker threads in the pool is configurable and scales between min/max size.\n " + + def __init__(self): + if config.THREADPOOL_SIZE < 1 or config.THREADPOOL_SIZE_MIN < 1: + raise ValueError("threadpool sizes must be greater than zero") + if config.THREADPOOL_SIZE_MIN > config.THREADPOOL_SIZE: + raise ValueError("minimum threadpool size must be less than or equal to max size") + self.idle = set() + self.busy = set() + self.closed = False + for _ in range(config.THREADPOOL_SIZE_MIN): + worker = Worker(self) + self.idle.add(worker) + worker.start() + + log.debug("worker pool created with initial size %d", self.num_workers()) + self.count_lock = threading.Lock() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + def close(self): + if not self.closed: + 
log.debug("closing down") + for w in list(self.busy): + w.process(None) + + for w in list(self.idle): + w.process(None) + + self.closed = True + time.sleep(0.1) + idle, self.idle = self.idle, set() + busy, self.busy = self.busy, set() + current_thread = threading.current_thread() + while idle: + p = idle.pop() + if p is not current_thread: + p.join(timeout=0.1) + + while busy: + p = busy.pop() + if p is not current_thread: + p.join(timeout=0.1) + + def __repr__(self): + return "<%s.%s at 0x%x; %d busy workers; %d idle workers>" % ( + self.__class__.__module__, self.__class__.__name__, id(self), len(self.busy), len(self.idle)) + + def num_workers(self): + return len(self.busy) + len(self.idle) + + def process(self, job): + if self.closed: + raise PoolError("job queue is closed") + elif self.idle: + worker = self.idle.pop() + else: + if self.num_workers() < config.THREADPOOL_SIZE: + worker = Worker(self) + worker.start() + else: + raise NoFreeWorkersError("no free workers available, increase thread pool size") + self.busy.add(worker) + worker.process(job) + log.debug("worker counts: %d busy, %d idle", len(self.busy), len(self.idle)) + + def notify_done(self, worker): + if worker in self.busy: + self.busy.remove(worker) + elif self.closed: + worker.process(None) + return + if len(self.idle) >= config.THREADPOOL_SIZE_MIN: + worker.process(None) + else: + self.idle.add(worker) + log.debug("worker counts: %d busy, %d idle", len(self.busy), len(self.idle)) diff --git a/APPS_UNCOMPILED/lib/Pyro4/socketserver/threadpoolserver.py b/APPS_UNCOMPILED/lib/Pyro4/socketserver/threadpoolserver.py new file mode 100644 index 0000000..ebbab95 --- /dev/null +++ b/APPS_UNCOMPILED/lib/Pyro4/socketserver/threadpoolserver.py @@ -0,0 +1,262 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: 
/var/user/app/device_supervisorbak/device_supervisor/lib/Pyro4/socketserver/threadpoolserver.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 10057 bytes +""" +Socket server based on a worker thread pool. Doesn't use select. + +Uses a single worker thread per client connection. + +Pyro - Python Remote Objects. Copyright by Irmen de Jong (irmen@razorvine.net). +""" +from __future__ import print_function +import socket, logging, sys, time, threading, os +from Pyro4 import socketutil, errors, util +from Pyro4.configuration import config +from .threadpool import Pool, NoFreeWorkersError +from .multiplexserver import selectors +log = logging.getLogger("Pyro4.threadpoolserver") +_client_disconnect_lock = threading.Lock() + +class ClientConnectionJob(object): + __doc__ = "\n Takes care of a single client connection and all requests\n that may arrive during its life span.\n " + + def __init__(self, clientSocket, clientAddr, daemon): + self.csock = socketutil.SocketConnection(clientSocket) + self.caddr = clientAddr + self.daemon = daemon + + def __call__(self): + if self.handleConnection(): + try: + while True: + try: + self.daemon.handleRequest(self.csock) + except (socket.error, errors.ConnectionClosedError): + log.debug("disconnected %s", self.caddr) + break + except errors.SecurityError: + log.debug("security error on client %s", self.caddr) + break + except errors.TimeoutError as x: + try: + log.warning("error during handleRequest: %s" % x) + break + finally: + x = None + del x + + except: + ex_t, ex_v, ex_tb = sys.exc_info() + tb = util.formatTraceback(ex_t, ex_v, ex_tb) + msg = "error during handleRequest: %s; %s" % (ex_v, "".join(tb)) + log.warning(msg) + break + + finally: + with _client_disconnect_lock: + try: + self.daemon._clientDisconnect(self.csock) + except Exception as x: + try: + log.warning("Error in clientDisconnect: " + str(x)) + finally: + x = None + del x + + self.csock.close() + + def handleConnection(self): + try: + if 
self.daemon._handshake(self.csock): + return True + self.csock.close() + except: + ex_t, ex_v, ex_tb = sys.exc_info() + tb = util.formatTraceback(ex_t, ex_v, ex_tb) + log.warning("error during connect/handshake: %s; %s", ex_v, "\n".join(tb)) + self.csock.close() + + return False + + def denyConnection(self, reason): + log.warning("client connection was denied: " + reason) + self.daemon._handshake((self.csock), denied_reason=reason) + self.csock.close() + + +class Housekeeper(threading.Thread): + + def __init__(self, daemon): + super(Housekeeper, self).__init__(name="housekeeper") + self.pyroDaemon = daemon + self.stop = threading.Event() + self.daemon = True + self.waittime = min(config.POLLTIMEOUT or 0, max(config.COMMTIMEOUT or 0, 5)) + + def run(self): + while True: + if self.stop.wait(self.waittime): + break + self.pyroDaemon._housekeeping() + + +class SocketServer_Threadpool(object): + __doc__ = "transport server for socket connections, worker thread pool version." + + def __init__(self): + self.daemon = self.sock = self._socketaddr = self.locationStr = self.pool = None + self.shutting_down = False + self.housekeeper = None + self._selector = selectors.DefaultSelector() if selectors else None + + def init(self, daemon, host, port, unixsocket=None): + log.info("starting thread pool socketserver") + self.daemon = daemon + self.sock = None + bind_location = unixsocket if unixsocket else (host, port) + if config.SSL: + sslContext = socketutil.getSSLcontext(servercert=(config.SSL_SERVERCERT), serverkey=(config.SSL_SERVERKEY), + keypassword=(config.SSL_SERVERKEYPASSWD), + cacerts=(config.SSL_CACERTS)) + log.info("using SSL, cert=%s key=%s cacerts=%s", config.SSL_SERVERCERT, config.SSL_SERVERKEY, config.SSL_CACERTS) + else: + sslContext = None + log.info("not using SSL") + self.sock = socketutil.createSocket(bind=bind_location, reuseaddr=(config.SOCK_REUSE), + timeout=(config.COMMTIMEOUT), + noinherit=True, + nodelay=(config.SOCK_NODELAY), + sslContext=sslContext) + 
self._socketaddr = self.sock.getsockname() + if not unixsocket: + if self._socketaddr[0].startswith("127."): + if (host is None or host.lower()) != "localhost" and not host.startswith("127."): + log.warning("weird DNS setup: %s resolves to localhost (127.x.x.x)", host) + elif unixsocket: + self.locationStr = "./u:" + unixsocket + else: + host = host or self._socketaddr[0] + port = port or self._socketaddr[1] + if ":" in host: + self.locationStr = "[%s]:%d" % (host, port) + else: + self.locationStr = "%s:%d" % (host, port) + self.pool = Pool() + self.housekeeper = Housekeeper(daemon) + self.housekeeper.start() + if self._selector: + self._selector.register(self.sock, selectors.EVENT_READ, self) + + def __del__(self): + if self.sock is not None: + self.sock.close() + self.sock = None + if self.pool is not None: + self.pool.close() + self.pool = None + if self.housekeeper: + self.housekeeper.stop.set() + self.housekeeper.join() + self.housekeeper = None + + def __repr__(self): + return "<%s on %s; %d workers>" % (self.__class__.__name__, self.locationStr, self.pool.num_workers()) + + def loop(self, loopCondition=(lambda: True)): + log.debug("threadpool server requestloop") + while self.sock is not None and not self.shutting_down: + if loopCondition(): + try: + self.events([self.sock]) + except (socket.error, OSError) as x: + try: + if not loopCondition(): + break + err = getattr(x, "errno", x.args[0]) + log.warning("socket error '%s' with errno=%d, shouldn't happen", x, err) + continue + finally: + x = None + del x + + except KeyboardInterrupt: + log.debug("stopping on break signal") + break + + def combine_loop(self, server): + raise TypeError("You can't use the loop combiner on the threadpool server type") + + def events(self, eventsockets): + """used for external event loops: handle events that occur on one of the sockets of this server""" + assert self.sock in eventsockets + try: + if self._selector: + events = self._selector.select(config.POLLTIMEOUT) + if not 
events: + return + else: + csock, caddr = self.sock.accept() + if self.shutting_down: + csock.close() + return + if hasattr(csock, "getpeercert"): + log.debug("connected %s - SSL", caddr) + else: + log.debug("connected %s - unencrypted", caddr) + if config.COMMTIMEOUT: + csock.settimeout(config.COMMTIMEOUT) + else: + job = ClientConnectionJob(csock, caddr, self.daemon) + try: + self.pool.process(job) + except NoFreeWorkersError: + job.denyConnection("no free workers, increase server threadpool size") + + except socket.timeout: + pass + + def shutdown(self): + self.shutting_down = True + self.wakeup() + time.sleep(0.05) + self.close() + self.sock = None + + def close(self): + if self.housekeeper: + self.housekeeper.stop.set() + self.housekeeper.join() + self.housekeeper = None + if self.sock: + sockname = None + try: + sockname = self.sock.getsockname() + except (socket.error, OSError): + pass + + try: + self.sock.close() + if type(sockname) is str: + if os.path.exists(sockname): + os.remove(sockname) + except Exception: + pass + + self.sock = None + self.pool.close() + + @property + def sockets(self): + return [ + self.sock] + + @property + def selector(self): + raise TypeError("threadpool server doesn't have multiplexing selector") + + def wakeup(self): + socketutil.interruptSocket(self._socketaddr) diff --git a/APPS_UNCOMPILED/lib/Pyro4/socketutil.py b/APPS_UNCOMPILED/lib/Pyro4/socketutil.py new file mode 100644 index 0000000..2e0dfb9 --- /dev/null +++ b/APPS_UNCOMPILED/lib/Pyro4/socketutil.py @@ -0,0 +1,641 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/Pyro4/socketutil.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 23920 bytes +""" +Low level socket utilities. + +Pyro - Python Remote Objects. Copyright by Irmen de Jong (irmen@razorvine.net). 
+""" +import os, socket, errno, time, sys, select, weakref +try: + import ssl +except ImportError: + ssl = None + +from Pyro4.configuration import config +from Pyro4.errors import CommunicationError, TimeoutError, ConnectionClosedError +try: + InterruptedError() +except NameError: + + class InterruptedError(Exception): + pass + + +ERRNO_RETRIES = [ + errno.EINTR, errno.EAGAIN, errno.EWOULDBLOCK, errno.EINPROGRESS] +if hasattr(errno, "WSAEINTR"): + ERRNO_RETRIES.append(errno.WSAEINTR) +if hasattr(errno, "WSAEWOULDBLOCK"): + ERRNO_RETRIES.append(errno.WSAEWOULDBLOCK) +if hasattr(errno, "WSAEINPROGRESS"): + ERRNO_RETRIES.append(errno.WSAEINPROGRESS) +ERRNO_BADF = [errno.EBADF] +if hasattr(errno, "WSAEBADF"): + ERRNO_BADF.append(errno.WSAEBADF) +ERRNO_ENOTSOCK = [errno.ENOTSOCK] +if hasattr(errno, "WSAENOTSOCK"): + ERRNO_ENOTSOCK.append(errno.WSAENOTSOCK) +if not hasattr(socket, "SOL_TCP"): + socket.SOL_TCP = socket.IPPROTO_TCP +ERRNO_EADDRNOTAVAIL = [errno.EADDRNOTAVAIL] +if hasattr(errno, "WSAEADDRNOTAVAIL"): + ERRNO_EADDRNOTAVAIL.append(errno.WSAEADDRNOTAVAIL) +ERRNO_EADDRINUSE = [errno.EADDRINUSE] +if hasattr(errno, "WSAEADDRINUSE"): + ERRNO_EADDRINUSE.append(errno.WSAEADDRINUSE) +if sys.version_info >= (3, 0): + basestring = str + +def getIpVersion(hostnameOrAddress): + """ + Determine what the IP version is of the given hostname or ip address (4 or 6). + First, it resolves the hostname or address to get an IP address. + Then, if the resolved IP contains a ':' it is considered to be an ipv6 address, + and if it contains a '.', it is ipv4. + """ + address = getIpAddress(hostnameOrAddress) + if "." in address: + return 4 + if ":" in address: + return 6 + raise CommunicationError("Unknown IP address format" + address) + + +def getIpAddress(hostname, workaround127=False, ipVersion=None): + """ + Returns the IP address for the given host. If you enable the workaround, + it will use a little hack if the ip address is found to be the loopback address. 
def getIpAddress(hostname, workaround127=False, ipVersion=None):
    """
    Returns the IP address for the given host. If you enable the workaround,
    it will use a little hack if the ip address is found to be the loopback address.
    The hack tries to discover an externally visible ip address instead (this only works for ipv4 addresses).
    Set ipVersion=6 to return ipv6 addresses, 4 to return ipv4, 0 to let OS choose the best one or None to use config.PREFER_IP_VERSION.
    """

    def getaddr(ipVersion):
        # resolve the hostname (or this machine's name) for the requested address family
        if ipVersion == 6:
            family = socket.AF_INET6
        elif ipVersion == 4:
            family = socket.AF_INET
        elif ipVersion == 0:
            family = socket.AF_UNSPEC
        else:
            raise ValueError("unknown value for argument ipVersion.")
        ip = socket.getaddrinfo(hostname or socket.gethostname(), 80, family, socket.SOCK_STREAM, socket.SOL_TCP)[0][4][0]
        if workaround127 and (ip.startswith("127.") or ip == "0.0.0.0"):
            ip = getInterfaceAddress("4.2.2.2")
        return ip

    try:
        if hostname and ":" in hostname and ipVersion is None:
            # a literal ipv6 address was given; let the OS pick the family
            ipVersion = 0
        if ipVersion is None:
            return getaddr(config.PREFER_IP_VERSION)
        return getaddr(ipVersion)
    except socket.gaierror:
        # bug fix: the decompiled text read `if (ipVersion == 6 or ipVersion) is None:`,
        # which is always False; the intended check is "explicitly ipv6, or
        # defaulted to ipv6 via config" before giving up on ipv6 resolution.
        if ipVersion == 6 or (ipVersion is None and config.PREFER_IP_VERSION == 6):
            raise socket.error("unable to determine IPV6 address")
        return getaddr(0)


def getInterfaceAddress(ip_address):
    """tries to find the ip address of the interface that connects to the given host's address"""
    family = socket.AF_INET if getIpVersion(ip_address) == 4 else socket.AF_INET6
    sock = socket.socket(family, socket.SOCK_DGRAM)
    try:
        # connecting a UDP socket sends no traffic; it only selects the outgoing interface
        sock.connect((ip_address, 53))
        return sock.getsockname()[0]
    finally:
        sock.close()


def __nextRetrydelay(delay):
    # growing retry delay: 0 -> 1ms -> 10ms -> +100ms per step thereafter
    if delay == 0.0:
        return 0.001
    if delay == 0.001:
        return 0.01
    return delay + 0.1
def receiveData(sock, size):
    """Read exactly `size` bytes from `sock` and return them.
    A short read is an error: a ConnectionClosedError is raised (carrying the
    bytes received so far on its `partialData` attribute), never a short or
    empty result. Timeouts surface as TimeoutError."""
    try:
        retrydelay = 0.0
        received = 0
        pieces = []
        # fast path: let the kernel gather the whole message in one call.
        # MSG_WAITALL can't be used on SSL sockets (detected via getpeercert).
        if config.USE_MSG_WAITALL and not hasattr(sock, "getpeercert"):
            while True:
                try:
                    blob = sock.recv(size, socket.MSG_WAITALL)
                    if len(blob) == size:
                        return blob
                    # short read; remember what we got and fall through to the chunk loop
                    received = len(blob)
                    pieces = [blob]
                    break
                except socket.timeout:
                    raise TimeoutError("receiving: timeout")
                except socket.error as x:
                    err = getattr(x, "errno", x.args[0])
                    if err not in ERRNO_RETRIES:
                        raise ConnectionClosedError("receiving: connection lost: " + str(x))
                    time.sleep(1e-05 + retrydelay)
                    retrydelay = __nextRetrydelay(retrydelay)

        # generic path: keep recv'ing chunks until the full size arrived
        while True:
            try:
                while received < size:
                    piece = sock.recv(min(60000, size - received))
                    if not piece:
                        break
                    pieces.append(piece)
                    received += len(piece)
                blob = b"".join(pieces)
                del pieces
                if len(blob) != size:
                    failure = ConnectionClosedError("receiving: not enough data")
                    failure.partialData = blob  # the bytes we did manage to get
                    raise failure
                return blob
            except socket.timeout:
                raise TimeoutError("receiving: timeout")
            except socket.error as x:
                err = getattr(x, "errno", x.args[0])
                if err not in ERRNO_RETRIES:
                    raise ConnectionClosedError("receiving: connection lost: " + str(x))
                time.sleep(1e-05 + retrydelay)
                retrydelay = __nextRetrydelay(retrydelay)

    except socket.timeout:
        raise TimeoutError("receiving: timeout")
+ """ + if sock.gettimeout() is None: + try: + sock.sendall(data) + return + except socket.timeout: + raise TimeoutError("sending: timeout") + except socket.error as x: + try: + raise ConnectionClosedError("sending: connection lost: " + str(x)) + finally: + x = None + del x + + else: + retrydelay = 0.0 + while data: + try: + sent = sock.send(data) + data = data[sent[:None]] + except socket.timeout: + raise TimeoutError("sending: timeout") + except socket.error as x: + try: + err = getattr(x, "errno", x.args[0]) + if err not in ERRNO_RETRIES: + raise ConnectionClosedError("sending: connection lost: " + str(x)) + time.sleep(1e-05 + retrydelay) + retrydelay = __nextRetrydelay(retrydelay) + finally: + x = None + del x + + +_GLOBAL_DEFAULT_TIMEOUT = object() + +def createSocket(bind=None, connect=None, reuseaddr=False, keepalive=True, timeout=_GLOBAL_DEFAULT_TIMEOUT, noinherit=False, ipv6=False, nodelay=True, sslContext=None): + """ + Create a socket. Default socket options are keepalive and IPv4 family, and nodelay (nagle disabled). + If 'bind' or 'connect' is a string, it is assumed a Unix domain socket is requested. + Otherwise, a normal tcp/ip socket is used. + Set ipv6=True to create an IPv6 socket rather than IPv4. + Set ipv6=None to use the PREFER_IP_VERSION config setting. 
+ """ + if bind: + if connect: + raise ValueError("bind and connect cannot both be specified at the same time") + forceIPv6 = ipv6 or ipv6 is None and config.PREFER_IP_VERSION == 6 + if isinstance(bind, basestring) or isinstance(connect, basestring): + family = socket.AF_UNIX + else: + if not bind: + if not connect: + family = socket.AF_INET6 if forceIPv6 else socket.AF_INET + else: + if type(bind) is tuple: + if not bind[0]: + family = socket.AF_INET6 if forceIPv6 else socket.AF_INET + else: + if getIpVersion(bind[0]) == 4: + if forceIPv6: + raise ValueError("IPv4 address is used bind argument with forceIPv6 argument:" + bind[0] + ".") + family = socket.AF_INET + else: + if getIpVersion(bind[0]) == 6: + family = socket.AF_INET6 + bind = ( + bind[0], bind[1], 0, 0) + else: + raise ValueError("unknown bind format.") + else: + if type(connect) is tuple: + if not connect[0]: + family = socket.AF_INET6 if forceIPv6 else socket.AF_INET + else: + if getIpVersion(connect[0]) == 4: + if forceIPv6: + raise ValueError("IPv4 address is used in connect argument with forceIPv6 argument:" + bind[0] + ".") + family = socket.AF_INET + else: + if getIpVersion(connect[0]) == 6: + family = socket.AF_INET6 + connect = ( + connect[0], connect[1], 0, 0) + else: + raise ValueError("unknown connect format.") + else: + raise ValueError("unknown bind or connect format.") + else: + sock = socket.socket(family, socket.SOCK_STREAM) + if sslContext: + if bind: + sock = sslContext.wrap_socket(sock, server_side=True) + else: + if connect: + sock = sslContext.wrap_socket(sock, server_side=False, server_hostname=(connect[0])) + else: + sock = sslContext.wrap_socket(sock, server_side=False) + if nodelay: + setNoDelay(sock) + if reuseaddr: + setReuseAddr(sock) + if noinherit: + setNoInherit(sock) + if timeout == 0: + timeout = None + if timeout is not _GLOBAL_DEFAULT_TIMEOUT: + sock.settimeout(timeout) + if bind: + if type(bind) is tuple and bind[1] == 0: + bindOnUnusedPort(sock, bind[0]) + else: + 
sock.bind(bind) + try: + sock.listen(100) + except (OSError, IOError): + pass + + if connect: + try: + sock.connect(connect) + except socket.error: + xv = sys.exc_info()[1] + errno = getattr(xv, "errno", 0) + if errno in ERRNO_RETRIES and not timeout is _GLOBAL_DEFAULT_TIMEOUT: + if timeout < 0.1: + timeout = 0.1 + while True: + try: + sr, sw, se = select.select([], [sock], [sock], timeout) + except InterruptedError: + continue + + if sock in sw: + break + elif sock in se: + sock.close() + raise socket.error("connect failed") + + else: + sock.close() + raise + + if keepalive: + setKeepalive(sock) + return sock + + +def createBroadcastSocket(bind=None, reuseaddr=False, timeout=_GLOBAL_DEFAULT_TIMEOUT, ipv6=False): + """ + Create a udp broadcast socket. + Set ipv6=True to create an IPv6 socket rather than IPv4. + Set ipv6=None to use the PREFER_IP_VERSION config setting. + """ + forceIPv6 = ipv6 or ipv6 is None and config.PREFER_IP_VERSION == 6 + if not bind: + family = socket.AF_INET6 if forceIPv6 else socket.AF_INET + else: + if type(bind) is tuple: + if not bind[0]: + family = socket.AF_INET6 if forceIPv6 else socket.AF_INET + else: + if getIpVersion(bind[0]) == 4: + if forceIPv6: + raise ValueError("IPv4 address is used with forceIPv6 option:" + bind[0] + ".") + family = socket.AF_INET + else: + if getIpVersion(bind[0]) == 6: + family = socket.AF_INET6 + bind = (bind[0], bind[1], 0, 0) + else: + raise ValueError("unknown bind format: %r" % (bind,)) + else: + raise ValueError("unknown bind format: %r" % (bind,)) + sock = socket.socket(family, socket.SOCK_DGRAM) + if family == socket.AF_INET: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) + if reuseaddr: + setReuseAddr(sock) + if timeout is None: + sock.settimeout(None) + else: + if timeout is not _GLOBAL_DEFAULT_TIMEOUT: + sock.settimeout(timeout) + elif bind: + host = bind[0] or "" + port = bind[1] + if port == 0: + bindOnUnusedPort(sock, host) + else: + if len(bind) == 2: + sock.bind((host, port)) 
+ else: + if len(bind) == 4: + sock.bind((host, port, 0, 0)) + else: + raise ValueError("bind must be None, 2-tuple or 4-tuple") + return sock + + +def setReuseAddr(sock): + """sets the SO_REUSEADDR option on the socket, if possible.""" + try: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + except Exception: + pass + + +def setNoDelay(sock): + """sets the TCP_NODELAY option on the socket (to disable Nagle's algorithm), if possible.""" + try: + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + except Exception: + pass + + +def setKeepalive(sock): + """sets the SO_KEEPALIVE option on the socket, if possible.""" + try: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) + except Exception: + pass + + +try: + import fcntl + + def setNoInherit(sock): + """Mark the given socket fd as non-inheritable to child processes""" + fd = sock.fileno() + flags = fcntl.fcntl(fd, fcntl.F_GETFD) + fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC) + + +except ImportError: + try: + if sys.platform == "cli": + raise NotImplementedError("IronPython can't obtain a proper HANDLE from a socket") + from ctypes import windll, WinError, wintypes + _SetHandleInformation = windll.kernel32.SetHandleInformation + _SetHandleInformation.argtypes = [wintypes.HANDLE, wintypes.DWORD, wintypes.DWORD] + _SetHandleInformation.restype = wintypes.BOOL + + def setNoInherit(sock): + """Mark the given socket fd as non-inheritable to child processes""" + if not _SetHandleInformation(sock.fileno(), 1, 0): + raise WinError() + + + except (ImportError, NotImplementedError): + + def setNoInherit(sock): + """Mark the given socket fd as non-inheritable to child processes (dummy)""" + pass + + +class SocketConnection(object): + __doc__ = "A wrapper class for plain sockets, containing various methods such as :meth:`send` and :meth:`recv`" + + def __init__(self, sock, objectId=None, keep_open=False): + self.sock = sock + self.objectId = objectId + self.pyroInstances = {} + 
self.tracked_resources = weakref.WeakSet() + self.keep_open = keep_open + + def __del__(self): + self.close() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + def send(self, data): + sendData(self.sock, data) + + def recv(self, size): + return receiveData(self.sock, size) + + def close(self): + if self.keep_open: + return + try: + self.sock.shutdown(socket.SHUT_RDWR) + except: + pass + + try: + self.sock.close() + except: + pass + + self.pyroInstances.clear() + for rsc in self.tracked_resources: + try: + rsc.close() + except Exception: + pass + + self.tracked_resources.clear() + + def fileno(self): + return self.sock.fileno() + + def family(self): + return family_str(self.sock) + + def setTimeout(self, timeout): + self.sock.settimeout(timeout) + + def getTimeout(self): + return self.sock.gettimeout() + + def getpeercert(self): + try: + return self.sock.getpeercert() + except AttributeError: + return + + timeout = property(getTimeout, setTimeout) + + +def family_str(sock): + f = sock.family + if f == socket.AF_INET: + return "IPv4" + if f == socket.AF_INET6: + return "IPv6" + if hasattr(socket, "AF_UNIX"): + if f == socket.AF_UNIX: + return "Unix" + return "???" + + +def findProbablyUnusedPort(family=socket.AF_INET, socktype=socket.SOCK_STREAM): + """Returns an unused port that should be suitable for binding (likely, but not guaranteed). + This code is copied from the stdlib's test.test_support module.""" + tempsock = socket.socket(family, socktype) + try: + port = bindOnUnusedPort(tempsock) + if sys.platform == "cli": + return port + 1 + return port + finally: + tempsock.close() + + +def bindOnUnusedPort(sock, host='localhost'): + """Bind the socket to a free port and return the port number. 
def bindOnUnusedPort(sock, host='localhost'):
    """Bind the socket to a free port and return the port number.
    This code is based on the code in the stdlib's test.test_support module."""
    if sock.family in (socket.AF_INET, socket.AF_INET6) and sock.type == socket.SOCK_STREAM:
        # on Windows, avoid another process stealing the "free" port out from under us
        if hasattr(socket, "SO_EXCLUSIVEADDRUSE"):
            try:
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1)
            except socket.error:
                pass
    if sock.family == socket.AF_INET:
        if host == "localhost":
            sock.bind(('127.0.0.1', 0))
        else:
            sock.bind((host, 0))
    elif sock.family == socket.AF_INET6:
        if host == "localhost":
            sock.bind(('::1', 0, 0, 0))
        else:
            sock.bind((host, 0, 0, 0))
    else:
        # bug fix: `"..." + sock.family` raised TypeError (str + AddressFamily),
        # masking the intended CommunicationError; stringify the family explicitly
        raise CommunicationError("unsupported socket family: " + str(sock.family))
    return sock.getsockname()[1]


def interruptSocket(address):
    """bit of a hack to trigger a blocking server to get out of the loop, useful at clean shutdowns"""
    try:
        sock = createSocket(connect=address, keepalive=False, timeout=None)
        try:
            # payload content is irrelevant; any bytes wake the blocked accept/recv
            sock.sendall(b'!!!!!!!!!!!!!!!!')
        except (socket.error, AttributeError):
            pass
        try:
            sock.shutdown(socket.SHUT_RDWR)
        except (OSError, socket.error):
            pass
        sock.close()
    except socket.error:
        pass
file not found") + __ssl_server_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) + __ssl_server_context.load_cert_chain(servercert, serverkey or None, keypassword or None) + if cacerts: + if os.path.isdir(cacerts): + __ssl_server_context.load_verify_locations(capath=cacerts) + else: + __ssl_server_context.load_verify_locations(cafile=cacerts) + if config.SSL_REQUIRECLIENTCERT: + __ssl_server_context.verify_mode = ssl.CERT_REQUIRED + else: + __ssl_server_context.verify_mode = ssl.CERT_NONE + return __ssl_server_context + if __ssl_client_context: + return __ssl_client_context + __ssl_client_context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH) + if clientcert: + if not os.path.isfile(clientcert): + raise IOError("client cert file not found") + else: + __ssl_client_context.load_cert_chain(clientcert, clientkey or None, keypassword or None) + if cacerts: + if os.path.isdir(cacerts): + __ssl_client_context.load_verify_locations(capath=cacerts) + else: + __ssl_client_context.load_verify_locations(cafile=cacerts) + return __ssl_client_context diff --git a/APPS_UNCOMPILED/lib/Pyro4/test/__init__.py b/APPS_UNCOMPILED/lib/Pyro4/test/__init__.py new file mode 100644 index 0000000..4cf3301 --- /dev/null +++ b/APPS_UNCOMPILED/lib/Pyro4/test/__init__.py @@ -0,0 +1,8 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/Pyro4/test/__init__.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 31 bytes +pass diff --git a/APPS_UNCOMPILED/lib/Pyro4/test/echoserver.py b/APPS_UNCOMPILED/lib/Pyro4/test/echoserver.py new file mode 100644 index 0000000..220679e --- /dev/null +++ b/APPS_UNCOMPILED/lib/Pyro4/test/echoserver.py @@ -0,0 +1,208 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 
2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/Pyro4/test/echoserver.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 7561 bytes +""" +Echo server for test purposes. +This is usually invoked by starting this module as a script: + + :command:`python -m Pyro4.test.echoserver` + or simply: :command:`pyro4-test-echoserver` + +It is also possible to use the :class:`EchoServer` in user code +but that is not terribly useful. + +Pyro - Python Remote Objects. Copyright by Irmen de Jong (irmen@razorvine.net). +""" +from __future__ import print_function +import sys, os, time, warnings, threading +from optparse import OptionParser +from Pyro4 import core, naming +from Pyro4.configuration import config +__all__ = [ + "EchoServer"] + +@core.expose +class EchoServer(object): + __doc__ = "\n The echo server object that is provided as a Pyro object by this module.\n If its :attr:`verbose` attribute is set to ``True``, it will print messages as it receives calls.\n " + _verbose = False + _must_shutdown = False + + def echo(self, message): + """return the message""" + if self._verbose: + message_str = repr(message).encode((sys.stdout.encoding), errors="replace").decode(sys.stdout.encoding) + print("%s - echo: %s" % (time.asctime(), message_str)) + return message + + def error(self): + """generates a simple exception without text""" + if self._verbose: + print("%s - error: generating exception" % time.asctime()) + raise ValueError("expected error from echoserver error() method") + + def error_with_text(self): + """generates a simple exception with message""" + if self._verbose: + print("%s - error: generating exception" % time.asctime()) + raise ValueError("the message of the error") + + @core.oneway + def oneway_echo(self, message): + """just like echo, but oneway; the client won't wait for response""" + if self._verbose: + message_str = repr(message).encode((sys.stdout.encoding), 
errors="replace").decode(sys.stdout.encoding) + print("%s - oneway_echo: %s" % (time.asctime(), message_str)) + return "bogus return value" + + def slow(self): + """returns (and prints) a message after a certain delay""" + if self._verbose: + print("%s - slow: waiting a bit..." % time.asctime()) + time.sleep(5) + if self._verbose: + print("%s - slow: returning result" % time.asctime()) + return "Finally, an answer!" + + def generator(self): + """a generator function that returns some elements on demand""" + yield "one" + yield "two" + yield "three" + + def nan(self): + return float("nan") + + def inf(self): + return float("inf") + + @core.oneway + def oneway_slow(self): + """prints a message after a certain delay, and returns; but the client won't wait for it""" + if self._verbose: + print("%s - oneway_slow: waiting a bit..." % time.asctime()) + time.sleep(5) + if self._verbose: + print("%s - oneway_slow: returning result" % time.asctime()) + return "bogus return value" + + def _private(self): + """a 'private' method that should not be accessible""" + return "should not be allowed" + + def __private(self): + """another 'private' method that should not be accessible""" + return "should not be allowed" + + def __dunder__(self): + """a double underscore method that should be accessible normally""" + return "should be allowed (dunder)" + + def shutdown(self): + """called to signal the echo server to shut down""" + if self._verbose: + print("%s - shutting down" % time.asctime()) + self._must_shutdown = True + + @property + def verbose(self): + return self._verbose + + @verbose.setter + def verbose(self, onoff): + self._verbose = bool(onoff) + + +class NameServer(threading.Thread): + + def __init__(self, hostname, hmac=None): + super(NameServer, self).__init__() + self.setDaemon(1) + self.hostname = hostname + self.hmac = hmac + self.started = threading.Event() + + def run(self): + self.uri, self.ns_daemon, self.bc_server = naming.startNS((self.hostname), 
hmac=(self.hmac)) + self.started.set() + if self.bc_server: + self.bc_server.runInThread() + self.ns_daemon.requestLoop() + + +def startNameServer(host, hmac=None): + ns = NameServer(host, hmac=hmac) + ns.start() + ns.started.wait() + return ns + + +def main(args=None, returnWithoutLooping=False): + parser = OptionParser() + parser.add_option("-H", "--host", default="localhost", help="hostname to bind server on (default=%default)") + parser.add_option("-p", "--port", type="int", default=0, help="port to bind server on") + parser.add_option("-u", "--unixsocket", help="Unix domain socket name to bind server on") + parser.add_option("-n", "--naming", action="store_true", default=False, help="register with nameserver") + parser.add_option("-N", "--nameserver", action="store_true", default=False, help="also start a nameserver") + parser.add_option("-v", "--verbose", action="store_true", default=False, help="verbose output") + parser.add_option("-q", "--quiet", action="store_true", default=False, help="don't output anything") + parser.add_option("-k", "--key", help="the HMAC key to use (deprecated)") + options, args = parser.parse_args(args) + if options.key: + warnings.warn("using -k to supply HMAC key on the command line is a security problem and is deprecated since Pyro 4.72. See the documentation for an alternative.") + if "PYRO_HMAC_KEY" in os.environ: + if options.key: + raise SystemExit("error: don't use -k and PYRO_HMAC_KEY at the same time") + options.key = os.environ["PYRO_HMAC_KEY"] + if options.verbose: + options.quiet = False + if not options.quiet: + print("Starting Pyro's built-in test echo server.") + config.SERVERTYPE = "multiplex" + hmac = (options.key or "").encode("utf-8") + if not hmac: + if not options.quiet: + print("Warning: HMAC key not set. 
Anyone can connect to this server!") + nameserver = None + if options.nameserver: + options.naming = True + nameserver = startNameServer((options.host), hmac=hmac) + d = core.Daemon(host=(options.host), port=(options.port), unixsocket=(options.unixsocket)) + if hmac: + d._pyroHmacKey = hmac + echo = EchoServer() + echo._verbose = options.verbose + objectName = "test.echoserver" + uri = d.register(echo, objectName) + if options.naming: + host, port = (None, None) + if nameserver is not None: + host, port = nameserver.uri.host, nameserver.uri.port + ns = naming.locateNS(host, port, hmac_key=hmac) + ns.register(objectName, uri) + if options.verbose: + print("using name server at %s" % ns._pyroUri) + if nameserver is not None: + if nameserver.bc_server: + print("broadcast server running at %s" % nameserver.bc_server.locationStr) + else: + print("not using a broadcast server") + else: + if options.verbose: + print("not using a name server.") + if not options.quiet: + print("object name: %s" % objectName) + print("echo uri: %s" % uri) + print("echoserver running.") + if returnWithoutLooping: + return ( + d, echo, uri) + d.requestLoop(loopCondition=(lambda: not echo._must_shutdown)) + d.close() + + +if __name__ == "__main__": + main() diff --git a/APPS_UNCOMPILED/lib/Pyro4/util.py b/APPS_UNCOMPILED/lib/Pyro4/util.py new file mode 100644 index 0000000..aafd116 --- /dev/null +++ b/APPS_UNCOMPILED/lib/Pyro4/util.py @@ -0,0 +1,976 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/Pyro4/util.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 41036 bytes +""" +Miscellaneous utilities, and serializers. + +Pyro - Python Remote Objects. Copyright by Irmen de Jong (irmen@razorvine.net). 
+""" +import array, sys, zlib, uuid, logging, linecache, traceback, inspect, struct, datetime, decimal, numbers +from Pyro4 import errors +from Pyro4.configuration import config +try: + import copyreg +except ImportError: + import copy_reg as copyreg + +log = logging.getLogger("Pyro4.util") + +def getPyroTraceback(ex_type=None, ex_value=None, ex_tb=None): + """Returns a list of strings that form the traceback information of a + Pyro exception. Any remote Pyro exception information is included. + Traceback information is automatically obtained via ``sys.exc_info()`` if + you do not supply the objects yourself.""" + + def formatRemoteTraceback(remote_tb_lines): + result = [ + " +--- This exception occured remotely (Pyro) - Remote traceback:"] + for line in remote_tb_lines: + if line.endswith("\n"): + line = line[None[:-1]] + lines = line.split("\n") + for line2 in lines: + result.append("\n | ") + result.append(line2) + + result.append("\n +--- End of remote traceback\n") + return result + + try: + if ex_type is not None: + if ex_value is None: + if ex_tb is None: + if type(ex_type) is not type: + raise TypeError("invalid argument: ex_type should be an exception type, or just supply no arguments at all") + if ex_type is None: + if ex_tb is None: + ex_type, ex_value, ex_tb = sys.exc_info() + remote_tb = getattr(ex_value, "_pyroTraceback", None) + local_tb = formatTraceback(ex_type, ex_value, ex_tb, config.DETAILED_TRACEBACK) + if remote_tb: + remote_tb = formatRemoteTraceback(remote_tb) + return local_tb + remote_tb + return local_tb + finally: + del ex_type + del ex_value + del ex_tb + + +def formatTraceback(ex_type=None, ex_value=None, ex_tb=None, detailed=False): + """Formats an exception traceback. If you ask for detailed formatting, + the result will contain info on the variables in each stack frame. 
+ You don't have to provide the exception info objects, if you omit them, + this function will obtain them itself using ``sys.exc_info()``.""" + if ex_type is not None: + if ex_value is None: + if ex_tb is None: + if type(ex_type) is not type: + raise TypeError("invalid argument: ex_type should be an exception type, or just supply no arguments at all") + if ex_type is None: + if ex_tb is None: + ex_type, ex_value, ex_tb = sys.exc_info() + if detailed and sys.platform != "cli": + + def makeStrValue(value): + try: + return repr(value) + except: + try: + return str(value) + except: + return "" + + try: + result = [ + "----------------------------------------------------\n"] + result.append(" EXCEPTION %s: %s\n" % (ex_type, ex_value)) + result.append(" Extended stacktrace follows (most recent call last)\n") + skipLocals = True + while ex_tb: + frame = ex_tb.tb_frame + sourceFileName = frame.f_code.co_filename + if "self" in frame.f_locals: + location = "%s.%s" % (frame.f_locals["self"].__class__.__name__, frame.f_code.co_name) + else: + location = frame.f_code.co_name + result.append("----------------------------------------------------\n") + result.append('File "%s", line %d, in %s\n' % (sourceFileName, ex_tb.tb_lineno, location)) + result.append("Source code:\n") + result.append(" " + linecache.getline(sourceFileName, ex_tb.tb_lineno).strip() + "\n") + if not skipLocals: + names = set() + names.update(getattr(frame.f_code, "co_varnames", ())) + names.update(getattr(frame.f_code, "co_names", ())) + names.update(getattr(frame.f_code, "co_cellvars", ())) + names.update(getattr(frame.f_code, "co_freevars", ())) + result.append("Local values:\n") + for name2 in sorted(names): + if name2 in frame.f_locals: + value = frame.f_locals[name2] + result.append(" %s = %s\n" % (name2, makeStrValue(value))) + if name2 == "self": + for name3, value in vars(value).items(): + result.append(" self.%s = %s\n" % (name3, makeStrValue(value))) + + skipLocals = False + ex_tb = ex_tb.tb_next 
+ + result.append("----------------------------------------------------\n") + result.append(" EXCEPTION %s: %s\n" % (ex_type, ex_value)) + result.append("----------------------------------------------------\n") + return result + except Exception: + return [ + "----------------------------------------------------\nError building extended traceback!!! :\n", + "".join((traceback.format_exception)(*sys.exc_info())) + "----------------------------------------------------" + "\n", + "Original Exception follows:\n", + "".join(traceback.format_exception(ex_type, ex_value, ex_tb))] + + else: + return traceback.format_exception(ex_type, ex_value, ex_tb) + + +all_exceptions = {} +if sys.version_info < (3, 0): + import exceptions + for name, t in vars(exceptions).items(): + if type(t) is type and issubclass(t, BaseException): + all_exceptions[name] = t + +else: + import builtins + for name, t in vars(builtins).items(): + if type(t) is type and issubclass(t, BaseException): + all_exceptions[name] = t + + buffer = bytearray +for name, t in vars(errors).items(): + if type(t) is type and issubclass(t, errors.PyroError): + all_exceptions[name] = t + +class SerializerBase(object): + __doc__ = "Base class for (de)serializer implementations (which must be thread safe)" + _SerializerBase__custom_class_to_dict_registry = {} + _SerializerBase__custom_dict_to_class_registry = {} + + def serializeData(self, data, compress=False): + """Serialize the given data object, try to compress if told so. + Returns a tuple of the serialized data (bytes) and a bool indicating if it is compressed or not.""" + data = self.dumps(data) + return self._SerializerBase__compressdata(data, compress) + + def deserializeData(self, data, compressed=False): + """Deserializes the given data (bytes). 
Set compressed to True to decompress the data first.""" + if compressed: + if sys.version_info < (3, 0): + data = self._convertToBytes(data) + data = zlib.decompress(data) + return self.loads(data) + + def serializeCall(self, obj, method, vargs, kwargs, compress=False): + """Serialize the given method call parameters, try to compress if told so. + Returns a tuple of the serialized data and a bool indicating if it is compressed or not.""" + data = self.dumpsCall(obj, method, vargs, kwargs) + return self._SerializerBase__compressdata(data, compress) + + def deserializeCall(self, data, compressed=False): + """Deserializes the given call data back to (object, method, vargs, kwargs) tuple. + Set compressed to True to decompress the data first.""" + if compressed: + if sys.version_info < (3, 0): + data = self._convertToBytes(data) + data = zlib.decompress(data) + return self.loadsCall(data) + + def loads(self, data): + raise NotImplementedError("implement in subclass") + + def loadsCall(self, data): + raise NotImplementedError("implement in subclass") + + def dumps(self, data): + raise NotImplementedError("implement in subclass") + + def dumpsCall(self, obj, method, vargs, kwargs): + raise NotImplementedError("implement in subclass") + + def _convertToBytes(self, data): + t = type(data) + if t is not bytes: + if t in (bytearray, buffer): + return bytes(data) + if t is memoryview: + return data.tobytes() + return data + + def __compressdata(self, data, compress): + if not compress or len(data) < 200: + return ( + data, False) + compressed = zlib.compress(data) + if len(compressed) < len(data): + return ( + compressed, True) + return ( + data, False) + + @classmethod + def register_type_replacement(cls, object_type, replacement_function): + raise NotImplementedError("implement in subclass") + + @classmethod + def register_class_to_dict(cls, clazz, converter, serpent_too=True): + """Registers a custom function that returns a dict representation of objects of the given 
class. + The function is called with a single parameter; the object to be converted to a dict.""" + cls._SerializerBase__custom_class_to_dict_registry[clazz] = converter + if serpent_too: + try: + get_serializer_by_id(SerpentSerializer.serializer_id) + import serpent + + def serpent_converter(obj, serializer, stream, level): + d = converter(obj) + serializer.ser_builtins_dict(d, stream, level) + + serpent.register_class(clazz, serpent_converter) + except errors.ProtocolError: + pass + + @classmethod + def unregister_class_to_dict(cls, clazz): + """Removes the to-dict conversion function registered for the given class. Objects of the class + will be serialized by the default mechanism again.""" + if clazz in cls._SerializerBase__custom_class_to_dict_registry: + del cls._SerializerBase__custom_class_to_dict_registry[clazz] + try: + get_serializer_by_id(SerpentSerializer.serializer_id) + import serpent + serpent.unregister_class(clazz) + except errors.ProtocolError: + pass + + @classmethod + def register_dict_to_class(cls, classname, converter): + """ + Registers a custom converter function that creates objects from a dict with the given classname tag in it. + The function is called with two parameters: the classname and the dictionary to convert to an instance of the class. + + This mechanism is not used for the pickle serializer. + """ + cls._SerializerBase__custom_dict_to_class_registry[classname] = converter + + @classmethod + def unregister_dict_to_class(cls, classname): + """ + Removes the converter registered for the given classname. Dicts with that classname tag + will be deserialized by the default mechanism again. + + This mechanism is not used for the pickle serializer. + """ + if classname in cls._SerializerBase__custom_dict_to_class_registry: + del cls._SerializerBase__custom_dict_to_class_registry[classname] + + @classmethod + def class_to_dict(cls, obj): + """ + Convert a non-serializable object to a dict. Partly borrowed from serpent. 
+ Not used for the pickle serializer. + """ + for clazz in cls._SerializerBase__custom_class_to_dict_registry: + if isinstance(obj, clazz): + return cls._SerializerBase__custom_class_to_dict_registry[clazz](obj) + + if type(obj) in (set, dict, tuple, list): + raise ValueError("can't serialize type " + str(obj.__class__) + " into a dict") + if hasattr(obj, "_pyroDaemon"): + obj._pyroDaemon = None + if isinstance(obj, BaseException): + return {'__class__':(obj.__class__.__module__ + ".") + (obj.__class__.__name__), + '__exception__':True, + 'args':obj.args, + 'attributes':vars(obj)} + try: + value = obj.__getstate__() + except AttributeError: + pass + else: + if isinstance(value, dict): + return value + else: + try: + value = dict(vars(obj)) + value["__class__"] = obj.__class__.__module__ + "." + obj.__class__.__name__ + return value + except TypeError: + if hasattr(obj, "__slots__"): + value = {} + for slot in obj.__slots__: + value[slot] = getattr(obj, slot) + + value["__class__"] = obj.__class__.__module__ + "." + obj.__class__.__name__ + return value + raise errors.SerializeError("don't know how to serialize class " + str(obj.__class__) + " using serializer " + str(cls.__name__) + ". Give it vars() or an appropriate __getstate__") + + @classmethod + def dict_to_class(cls, data): + """ + Recreate an object out of a dict containing the class name and the attributes. + Only a fixed set of classes are recognized. + Not used for the pickle serializer. 
+ """ + from Pyro4 import core, futures + classname = data.get("__class__", "") + if isinstance(classname, bytes): + classname = classname.decode("utf-8") + if classname in cls._SerializerBase__custom_dict_to_class_registry: + converter = cls._SerializerBase__custom_dict_to_class_registry[classname] + return converter(classname, data) + if "__" in classname: + raise errors.SecurityError("refused to deserialize types with double underscores in their name: " + classname) + if classname.startswith("Pyro4.core."): + if classname == "Pyro4.core.URI": + uri = core.URI.__new__(core.URI) + uri.__setstate_from_dict__(data["state"]) + return uri + if classname == "Pyro4.core.Proxy": + proxy = core.Proxy.__new__(core.Proxy) + proxy.__setstate_from_dict__(data["state"]) + return proxy + if classname == "Pyro4.core.Daemon": + daemon = core.Daemon.__new__(core.Daemon) + daemon.__setstate_from_dict__(data["state"]) + return daemon + else: + if classname.startswith("Pyro4.util."): + if classname == "Pyro4.util.SerpentSerializer": + return SerpentSerializer() + if classname == "Pyro4.util.PickleSerializer": + return PickleSerializer() + if classname == "Pyro4.util.MarshalSerializer": + return MarshalSerializer() + if classname == "Pyro4.util.JsonSerializer": + return JsonSerializer() + if classname == "Pyro4.util.MsgpackSerializer": + return MsgpackSerializer() + if classname == "Pyro4.util.CloudpickleSerializer": + return CloudpickleSerializer() + if classname == "Pyro4.util.DillSerializer": + return DillSerializer() + elif classname.startswith("Pyro4.errors."): + errortype = getattr(errors, classname.split(".", 2)[2]) + if issubclass(errortype, errors.PyroError): + return SerializerBase.make_exception(errortype, data) + elif classname == "Pyro4.futures._ExceptionWrapper": + ex = data["exception"] + if isinstance(ex, dict): + if "__class__" in ex: + ex = SerializerBase.dict_to_class(ex) + return futures._ExceptionWrapper(ex) + if data.get("__exception__", False): + if classname in 
all_exceptions: + return SerializerBase.make_exception(all_exceptions[classname], data) + namespace, short_classname = classname.split(".", 1) + if namespace in ('builtins', 'exceptions'): + if sys.version_info < (3, 0): + exceptiontype = getattr(exceptions, short_classname) + if issubclass(exceptiontype, BaseException): + return SerializerBase.make_exception(exceptiontype, data) + else: + exceptiontype = getattr(builtins, short_classname) + if issubclass(exceptiontype, BaseException): + return SerializerBase.make_exception(exceptiontype, data) + else: + if namespace == "sqlite3": + if short_classname.endswith("Error"): + import sqlite3 + exceptiontype = getattr(sqlite3, short_classname) + if issubclass(exceptiontype, BaseException): + return SerializerBase.make_exception(exceptiontype, data) + log.warning("unsupported serialized class: " + classname) + raise errors.SerializeError("unsupported serialized class: " + classname) + + @staticmethod + def make_exception(exceptiontype, data): + ex = exceptiontype(*data["args"]) + if "attributes" in data: + for attr, value in data["attributes"].items(): + setattr(ex, attr, value) + + return ex + + def recreate_classesParse error at or near `LOAD_SETCOMP' instruction at offset 20 + + def __eq__(self, other): + """this equality method is only to support the unit tests of this class""" + return isinstance(other, SerializerBase) and vars(self) == vars(other) + + def __ne__(self, other): + return not self.__eq__(other) + + __hash__ = object.__hash__ + + +class PickleSerializer(SerializerBase): + __doc__ = "\n A (de)serializer that wraps the Pickle serialization protocol.\n It can optionally compress the serialized data, and is thread safe.\n " + serializer_id = 4 + + def dumpsCall(self, obj, method, vargs, kwargs): + return pickle.dumps((obj, method, vargs, kwargs), config.PICKLE_PROTOCOL_VERSION) + + def dumps(self, data): + return pickle.dumps(data, config.PICKLE_PROTOCOL_VERSION) + + def loadsCall(self, data): + data = 
self._convertToBytes(data) + return pickle.loads(data) + + def loads(self, data): + data = self._convertToBytes(data) + return pickle.loads(data) + + @classmethod + def register_type_replacement(cls, object_type, replacement_function): + + def copyreg_function(obj): + return replacement_function(obj).__reduce__() + + if not (object_type is type or inspect.isclass(object_type)): + raise ValueError("refusing to register replacement for a non-type or the type 'type' itself") + try: + copyreg.pickle(object_type, copyreg_function) + except TypeError: + pass + + +class CloudpickleSerializer(SerializerBase): + __doc__ = "\n A (de)serializer that wraps the Cloudpickle serialization protocol.\n It can optionally compress the serialized data, and is thread safe.\n " + serializer_id = 7 + + def dumpsCall(self, obj, method, vargs, kwargs): + return cloudpickle.dumps((obj, method, vargs, kwargs), config.PICKLE_PROTOCOL_VERSION) + + def dumps(self, data): + return cloudpickle.dumps(data, config.PICKLE_PROTOCOL_VERSION) + + def loadsCall(self, data): + return cloudpickle.loads(data) + + def loads(self, data): + return cloudpickle.loads(data) + + @classmethod + def register_type_replacement(cls, object_type, replacement_function): + + def copyreg_function(obj): + return replacement_function(obj).__reduce__() + + if not (object_type is type or inspect.isclass(object_type)): + raise ValueError("refusing to register replacement for a non-type or the type 'type' itself") + try: + copyreg.pickle(object_type, copyreg_function) + except TypeError: + pass + + +class DillSerializer(SerializerBase): + __doc__ = "\n A (de)serializer that wraps the Dill serialization protocol.\n It can optionally compress the serialized data, and is thread safe.\n " + serializer_id = 5 + + def dumpsCall(self, obj, method, vargs, kwargs): + return dill.dumps((obj, method, vargs, kwargs), config.DILL_PROTOCOL_VERSION) + + def dumps(self, data): + return dill.dumps(data, config.DILL_PROTOCOL_VERSION) + + def 
loadsCall(self, data): + return dill.loads(data) + + def loads(self, data): + return dill.loads(data) + + @classmethod + def register_type_replacement(cls, object_type, replacement_function): + + def copyreg_function(obj): + return replacement_function(obj).__reduce__() + + if not (object_type is type or inspect.isclass(object_type)): + raise ValueError("refusing to register replacement for a non-type or the type 'type' itself") + try: + copyreg.pickle(object_type, copyreg_function) + except TypeError: + pass + + +class MarshalSerializer(SerializerBase): + __doc__ = "(de)serializer that wraps the marshal serialization protocol." + serializer_id = 3 + + def dumpsCallParse error at or near `LOAD_DICTCOMP' instruction at offset 22 + + def dumps(self, data): + return marshal.dumps(self.convert_obj_into_marshallable(data)) + + if sys.platform == "cli": + + def loadsCall(self, data): + if type(data) is not str: + data = str(data) + obj, method, vargs, kwargs = marshal.loads(data) + vargs = self.recreate_classes(vargs) + kwargs = self.recreate_classes(kwargs) + return (obj, method, vargs, kwargs) + + def loads(self, data): + if type(data) is not str: + data = str(data) + return self.recreate_classes(marshal.loads(data)) + + else: + + def loadsCall(self, data): + data = self._convertToBytes(data) + obj, method, vargs, kwargs = marshal.loads(data) + vargs = self.recreate_classes(vargs) + kwargs = self.recreate_classes(kwargs) + return (obj, method, vargs, kwargs) + + def loads(self, data): + data = self._convertToBytes(data) + return self.recreate_classes(marshal.loads(data)) + + marshalable_types = (str, int, float, type(None), bool, complex, bytes, bytearray, + tuple, set, frozenset, list, dict) + if sys.version_info < (3, 0): + marshalable_types += (unicode,) + + def convert_obj_into_marshallable(self, obj): + if isinstance(obj, self.marshalable_types): + return obj + if isinstance(obj, array.array): + if obj.typecode == "c": + return obj.tostring() + if obj.typecode == 
"u": + return obj.tounicode() + return obj.tolist() + return self.class_to_dict(obj) + + @classmethod + def class_to_dict(cls, obj): + if isinstance(obj, uuid.UUID): + return str(obj) + return super(MarshalSerializer, cls).class_to_dict(obj) + + @classmethod + def register_type_replacement(cls, object_type, replacement_function): + pass + + +class SerpentSerializer(SerializerBase): + __doc__ = "(de)serializer that wraps the serpent serialization protocol." + serializer_id = 1 + + def dumpsCall(self, obj, method, vargs, kwargs): + return serpent.dumps((obj, method, vargs, kwargs), module_in_classname=True) + + def dumps(self, data): + return serpent.dumps(data, module_in_classname=True) + + def loadsCall(self, data): + obj, method, vargs, kwargs = serpent.loads(data) + vargs = self.recreate_classes(vargs) + kwargs = self.recreate_classes(kwargs) + return (obj, method, vargs, kwargs) + + def loads(self, data): + return self.recreate_classes(serpent.loads(data)) + + @classmethod + def register_type_replacement(cls, object_type, replacement_function): + + def custom_serializer(object, serpent_serializer, outputstream, indentlevel): + replaced = replacement_function(object) + if replaced is object: + serpent_serializer.ser_default_class(replaced, outputstream, indentlevel) + else: + serpent_serializer._serialize(replaced, outputstream, indentlevel) + + if not (object_type is type or inspect.isclass(object_type)): + raise ValueError("refusing to register replacement for a non-type or the type 'type' itself") + serpent.register_class(object_type, custom_serializer) + + @classmethod + def dict_to_class(cls, data): + if data.get("__class__") == "float": + return float(data["value"]) + return super(SerpentSerializer, cls).dict_to_class(data) + + +class JsonSerializer(SerializerBase): + __doc__ = "(de)serializer that wraps the json serialization protocol." 
+ serializer_id = 2 + _JsonSerializer__type_replacements = {} + + def dumpsCall(self, obj, method, vargs, kwargs): + data = { + 'object': obj, 'method': method, 'params': vargs, 'kwargs': kwargs} + data = json.dumps(data, ensure_ascii=False, default=(self.default)) + return data.encode("utf-8") + + def dumps(self, data): + data = json.dumps(data, ensure_ascii=False, default=(self.default)) + return data.encode("utf-8") + + def loadsCall(self, data): + data = self._convertToBytes(data).decode("utf-8") + data = json.loads(data) + vargs = self.recreate_classes(data["params"]) + kwargs = self.recreate_classes(data["kwargs"]) + return (data["object"], data["method"], vargs, kwargs) + + def loads(self, data): + data = self._convertToBytes(data).decode("utf-8") + return self.recreate_classes(json.loads(data)) + + def default(self, obj): + replacer = self._JsonSerializer__type_replacements.get(type(obj), None) + if replacer: + obj = replacer(obj) + if isinstance(obj, set): + return tuple(obj) + if isinstance(obj, uuid.UUID): + return str(obj) + if isinstance(obj, (datetime.datetime, datetime.date)): + return obj.isoformat() + if isinstance(obj, decimal.Decimal): + return str(obj) + if isinstance(obj, array.array): + if obj.typecode == "c": + return obj.tostring() + if obj.typecode == "u": + return obj.tounicode() + return obj.tolist() + return self.class_to_dict(obj) + + @classmethod + def register_type_replacement(cls, object_type, replacement_function): + if not (object_type is type or inspect.isclass(object_type)): + raise ValueError("refusing to register replacement for a non-type or the type 'type' itself") + cls._JsonSerializer__type_replacements[object_type] = replacement_function + + +class MsgpackSerializer(SerializerBase): + __doc__ = "(de)serializer that wraps the msgpack serialization protocol." 
+ serializer_id = 6 + _MsgpackSerializer__type_replacements = {} + + def dumpsCall(self, obj, method, vargs, kwargs): + return msgpack.packb((obj, method, vargs, kwargs), use_bin_type=True, default=(self.default)) + + def dumps(self, data): + return msgpack.packb(data, use_bin_type=True, default=(self.default)) + + def loadsCall(self, data): + data = self._convertToBytes(data) + obj, method, vargs, kwargs = msgpack.unpackb(data, raw=False, object_hook=(self.object_hook)) + return (obj, method, vargs, kwargs) + + def loads(self, data): + data = self._convertToBytes(data) + return msgpack.unpackb(data, raw=False, object_hook=(self.object_hook), ext_hook=(self.ext_hook)) + + def default(self, obj): + replacer = self._MsgpackSerializer__type_replacements.get(type(obj), None) + if replacer: + obj = replacer(obj) + if isinstance(obj, set): + return tuple(obj) + if isinstance(obj, uuid.UUID): + return str(obj) + if isinstance(obj, bytearray): + return bytes(obj) + if isinstance(obj, complex): + return msgpack.ExtType(48, struct.pack("dd", obj.real, obj.imag)) + if isinstance(obj, datetime.datetime): + if obj.tzinfo: + raise errors.SerializeError("msgpack cannot serialize datetime with timezone info") + return msgpack.ExtType(50, struct.pack("d", obj.timestamp())) + if isinstance(obj, datetime.date): + return msgpack.ExtType(51, struct.pack("l", obj.toordinal())) + if isinstance(obj, decimal.Decimal): + return str(obj) + if isinstance(obj, numbers.Number): + return msgpack.ExtType(49, str(obj).encode("ascii")) + if isinstance(obj, array.array): + if obj.typecode == "c": + return obj.tostring() + if obj.typecode == "u": + return obj.tounicode() + return obj.tolist() + return self.class_to_dict(obj) + + def object_hook(self, obj): + if "__class__" in obj: + return self.dict_to_class(obj) + return obj + + def ext_hook(self, code, data): + if code == 48: + real, imag = struct.unpack("dd", data) + return complex(real, imag) + if code == 49: + return int(data) + if code == 50: + 
return datetime.datetime.fromtimestamp(struct.unpack("d", data)[0]) + if code == 51: + return datetime.date.fromordinal(struct.unpack("l", data)[0]) + raise errors.SerializeError("invalid ext code for msgpack: " + str(code)) + + @classmethod + def register_type_replacement(cls, object_type, replacement_function): + if not (object_type is type or inspect.isclass(object_type)): + raise ValueError("refusing to register replacement for a non-type or the type 'type' itself") + cls._MsgpackSerializer__type_replacements[object_type] = replacement_function + + +_serializers = {} +_serializers_by_id = {} + +def get_serializer(name): + try: + return _serializers[name] + except KeyError: + raise errors.SerializeError("serializer '%s' is unknown or not available" % name) + + +def get_serializer_by_id(sid): + try: + return _serializers_by_id[sid] + except KeyError: + raise errors.SerializeError("no serializer available for id %d" % sid) + + +try: + import cPickle as pickle +except ImportError: + import pickle + +assert config.PICKLE_PROTOCOL_VERSION >= 2, "pickle protocol needs to be 2 or higher" +_ser = PickleSerializer() +_serializers["pickle"] = _ser +_serializers_by_id[_ser.serializer_id] = _ser +import marshal +_ser = MarshalSerializer() +_serializers["marshal"] = _ser +_serializers_by_id[_ser.serializer_id] = _ser +try: + import cloudpickle + _ser = CloudpickleSerializer() + _serializers["cloudpickle"] = _ser + _serializers_by_id[_ser.serializer_id] = _ser +except ImportError: + pass + +try: + import dill + _ser = DillSerializer() + _serializers["dill"] = _ser + _serializers_by_id[_ser.serializer_id] = _ser +except ImportError: + pass + +try: + try: + import importlib + json = importlib.import_module(config.JSON_MODULE) + except ImportError: + json = __import__(config.JSON_MODULE) + + _ser = JsonSerializer() + _serializers["json"] = _ser + _serializers_by_id[_ser.serializer_id] = _ser +except ImportError: + pass + +try: + import serpent + _ser = SerpentSerializer() + 
_serializers["serpent"] = _ser + _serializers_by_id[_ser.serializer_id] = _ser +except ImportError: + log.warning("serpent serializer is not available") + +try: + import msgpack + _ser = MsgpackSerializer() + _serializers["msgpack"] = _ser + _serializers_by_id[_ser.serializer_id] = _ser +except ImportError: + pass + +del _ser + +def getAttribute(obj, attr): + """ + Resolves an attribute name to an object. Raises + an AttributeError if any attribute in the chain starts with a '``_``'. + Doesn't resolve a dotted name, because that is a security vulnerability. + It treats it as a single attribute name (and the lookup will likely fail). + """ + if is_private_attribute(attr): + raise AttributeError("attempt to access private attribute '%s'" % attr) + else: + obj = getattr(obj, attr) + if not config.REQUIRE_EXPOSE or getattr(obj, "_pyroExposed", False): + return obj + raise AttributeError("attempt to access unexposed attribute '%s'" % attr) + + +def excepthook(ex_type, ex_value, ex_tb): + """An exception hook you can use for ``sys.excepthook``, to automatically print remote Pyro tracebacks""" + traceback = "".join(getPyroTraceback(ex_type, ex_value, ex_tb)) + sys.stderr.write(traceback) + + +def fixIronPythonExceptionForPickle(exceptionObject, addAttributes): + """ + Function to hack around a bug in IronPython where it doesn't pickle + exception attributes. We piggyback them into the exception's args. 
+ Bug report is at https://github.com/IronLanguages/main/issues/943 + Bug is still present in Ironpython 2.7.7 + """ + if hasattr(exceptionObject, "args"): + if addAttributes: + ironpythonArgs = vars(exceptionObject) + ironpythonArgs["__ironpythonargs__"] = True + exceptionObject.args += (ironpythonArgs,) + else: + if len(exceptionObject.args) > 0: + piggyback = exceptionObject.args[-1] + if type(piggyback) is dict: + if piggyback.get("__ironpythonargs__"): + del piggyback["__ironpythonargs__"] + exceptionObject.args = exceptionObject.args[None[:-1]] + exceptionObject.__dict__.update(piggyback) + + +__exposed_member_cache = {} + +def reset_exposed_members(obj, only_exposed=True, as_lists=False): + """Delete any cached exposed members forcing recalculation on next request""" + if not inspect.isclass(obj): + obj = obj.__class__ + cache_key = ( + obj, only_exposed, as_lists) + __exposed_member_cache.pop(cache_key, None) + + +def get_exposed_members(obj, only_exposed=True, as_lists=False, use_cache=True): + """ + Return public and exposed members of the given object's class. + You can also provide a class directly. + Private members are ignored no matter what (names starting with underscore). + If only_exposed is True, only members tagged with the @expose decorator are + returned. If it is False, all public members are returned. + The return value consists of the exposed methods, exposed attributes, and methods + tagged as @oneway. + (All this is used as meta data that Pyro sends to the proxy if it asks for it) + as_lists is meant for python 2 compatibility. 
+ """ + if not inspect.isclass(obj): + obj = obj.__class__ + cache_key = (obj, only_exposed, as_lists) + if use_cache: + if cache_key in __exposed_member_cache: + return __exposed_member_cache[cache_key] + methods = set() + oneway = set() + attrs = set() + for m in dir(obj): + if is_private_attribute(m): + continue + v = getattr(obj, m) + if inspect.ismethod(v) or inspect.isfunction(v) or inspect.ismethoddescriptor(v): + if getattr(v, "_pyroExposed", not only_exposed): + methods.add(m) + if getattr(v, "_pyroOneway", False): + oneway.add(m) + elif inspect.isdatadescriptor(v): + func = getattr(v, "fget", None) or getattr(v, "fset", None) or getattr(v, "fdel", None) + if func is not None: + if getattr(func, "_pyroExposed", not only_exposed): + attrs.add(m) + + if as_lists: + methods = list(methods) + oneway = list(oneway) + attrs = list(attrs) + result = {'methods':methods, 'oneway':oneway, + 'attrs':attrs} + __exposed_member_cache[cache_key] = result + return result + + +def get_exposed_property_value(obj, propname, only_exposed=True): + """ + Return the value of an @exposed @property. + If the requested property is not a @property or not exposed, + an AttributeError is raised instead. + """ + v = getattr(obj.__class__, propname) + if inspect.isdatadescriptor(v): + if v.fget: + if getattr(v.fget, "_pyroExposed", not only_exposed): + return v.fget(obj) + raise AttributeError("attempt to access unexposed or unknown remote attribute '%s'" % propname) + + +def set_exposed_property_value(obj, propname, value, only_exposed=True): + """ + Sets the value of an @exposed @property. + If the requested property is not a @property or not exposed, + an AttributeError is raised instead. 
+ """ + v = getattr(obj.__class__, propname) + if inspect.isdatadescriptor(v): + pfunc = v.fget or v.fset or v.fdel + if v.fset: + if getattr(pfunc, "_pyroExposed", not only_exposed): + return v.fset(obj, value) + raise AttributeError("attempt to access unexposed or unknown remote attribute '%s'" % propname) + + +_private_dunder_methods = frozenset([ + '__init__', '__init_subclass__', '__class__', '__module__', '__weakref__', + '__call__', + '__new__', '__del__', '__repr__', '__unicode__', + '__str__', '__format__', + '__nonzero__', '__bool__', '__coerce__', + '__cmp__', '__eq__', '__ne__', + '__hash__', '__ge__', '__gt__', '__le__', '__lt__', + '__dir__', '__enter__', + '__exit__', '__copy__', '__deepcopy__', '__sizeof__', + '__getattr__', '__setattr__', + '__hasattr__', '__getattribute__', '__delattr__', + '__instancecheck__', + '__subclasscheck__', '__getinitargs__', '__getnewargs__', + '__getstate__', + '__setstate__', '__reduce__', '__reduce_ex__', + '__getstate_for_dict__', + '__setstate_from_dict__', '__subclasshook__']) + +def is_private_attribute(attr_name): + """returns if the attribute name is to be considered private or not.""" + if attr_name in _private_dunder_methods: + return True + if not attr_name.startswith("_"): + return False + elif len(attr_name) > 4: + if attr_name.startswith("__") and attr_name.endswith("__"): + return False + return True \ No newline at end of file diff --git a/APPS_UNCOMPILED/lib/Pyro4/utils/__init__.py b/APPS_UNCOMPILED/lib/Pyro4/utils/__init__.py new file mode 100644 index 0000000..bfa49da --- /dev/null +++ b/APPS_UNCOMPILED/lib/Pyro4/utils/__init__.py @@ -0,0 +1,8 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/Pyro4/utils/__init__.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 31 bytes +pass diff --git 
def fixExecSourceNewlines(source):
    """
    Normalize source-code newlines before handing the text to compile()/exec.

    On very old interpreters (Python < 2.7 and 3.0/3.1) compile() is picky
    about Windows line endings, so those are converted to unix newlines and
    a single trailing newline is ensured; on modern interpreters the text is
    only stripped of trailing whitespace.
    """
    # BUG FIX: the decompiled bytecode read sys.version_info[None[:2]],
    # which raises TypeError ('NoneType' object is not subscriptable) as soon
    # as the left operand of `or` is False (i.e. on every Python >= 2.7).
    # The intended expression is sys.version_info[:2].
    if sys.version_info < (2, 7) or sys.version_info[:2] in ((3, 0), (3, 1)):
        source = source.replace("\r\n", "\n")
        source = source.rstrip() + "\n"
    source = source.rstrip()
    return source
class FlameBuiltin(object):
    """Proxy to a remote builtin function."""

    def __init__(self, flameserver, builtin):
        # keep our own proxy to the flame server so this object can be
        # released independently of the one that created it
        uri = flameserver._pyroDaemon.uriFor(flameserver)
        self.flameserver = core.Proxy(uri)
        self.builtin = builtin

    def __call__(self, *args, **kwargs):
        # forward the call to the builtin of the same name on the server
        return self.flameserver.invokeBuiltin(self.builtin, args, kwargs)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.flameserver._pyroRelease()

    def __repr__(self):
        location = self.flameserver._pyroUri.location
        return "<%s.%s at 0x%x; builtin '%s' at %s>" % (
            type(self).__module__, type(self).__name__,
            id(self), self.builtin, location)
class RemoteInteractiveConsole(object):
    """Proxy to a remote interactive console."""

    class LineSendingConsole(code.InteractiveConsole):
        """makes sure the lines are sent to the remote console"""

        def __init__(self, remoteconsole):
            # NOTE(review): the filename argument shows up empty in the
            # decompiled text; angle-bracket content appears stripped -- confirm
            # against upstream before relying on the value.
            code.InteractiveConsole.__init__(self, filename="")
            self.remoteconsole = remoteconsole

        def push(self, line):
            # send the line to the server side console and echo its output here
            output, more = self.remoteconsole.push_and_get_output(line)
            if output:
                sys.stdout.write(output)
            return more

    def __init__(self, remoteconsoleuri):
        self.remoteconsole = core.Proxy(remoteconsoleuri)

    def interact(self):
        """Run a local read-eval loop that forwards every line to the server."""
        local_console = self.LineSendingConsole(self.remoteconsole)
        local_console.interact(banner=self.remoteconsole.get_banner())
        print("(Remote session ended)")

    def close(self):
        """Terminate the remote console object and release the proxy."""
        self.remoteconsole.terminate()
        self.remoteconsole._pyroRelease()

    def terminate(self):
        """Alias for close()."""
        self.close()

    def __repr__(self):
        return "<%s.%s at 0x%x; for %s>" % (
            type(self).__module__, type(self).__name__,
            id(self), self.remoteconsole._pyroUri.location)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()
    def module(self, name):
        """
        Import a module on the server given by the module name and returns a proxy to it.
        The returned proxy does not support direct attribute access, if you want that,
        you should use the ``evaluate`` method instead.
        """
        # importlib is preferred when available; bare __import__ is the
        # fallback for interpreters without it (see the try/except at module top)
        if importlib:
            importlib.import_module(name)
        else:
            __import__(name)
        return FlameModule(self, name)

    def builtin(self, name):
        """returns a proxy to the given builtin on the server"""
        return FlameBuiltin(self, name)

    def execute(self, code):
        """execute a piece of code"""
        # SECURITY: runs arbitrary client-supplied code in this module's
        # globals -- this is the documented, deliberate danger of Flame.
        # NOTE(review): the filename argument shows up empty in the decompiled
        # text; it may have been stripped -- confirm against upstream.
        exec_function(code, "", globals())

    def evaluate(self, expression):
        """evaluate an expression and return its result"""
        # SECURITY: eval of an arbitrary client-supplied expression.
        return eval(expression)
+ """ + createModule(modulename, modulesource) + + def getmodule(self, modulename): + """obtain the source code from a module on the server""" + import inspect + module = __import__(modulename, globals={}, locals={}) + return inspect.getsource(module) + + def sendfile(self, filename, filedata): + """store a new file on the server""" + with open(filename, "wb") as targetfile: + os.chmod(filename, stat.S_IRUSR | stat.S_IWUSR) + targetfile.write(filedata) + + def getfile(self, filename): + """read any accessible file from the server""" + with open(filename, "rb") as diskfile: + return diskfile.read() + + def console(self): + """get a proxy for a remote interactive console session""" + console = InteractiveConsole(filename="") + uri = self._pyroDaemon.register(console) + console.banner = "Python %s on %s\n(Remote console on %s)" % (sys.version, sys.platform, uri.location) + return RemoteInteractiveConsole(uri) + + @core.expose + def invokeBuiltin(self, builtin, args, kwargs): + return (getattr(builtins, builtin))(*args, **kwargs) + + @core.expose + def invokeModule(self, dottedname, args, kwargs): + modulename, dottedname = dottedname.split(".", 1) + module = sys.modules[modulename] + method = module + for attr in dottedname.split("."): + method = getattr(method, attr) + + return method(*args, **kwargs) + + +def createModule(name, source, filename='', namespace=None): + """ + Utility function to create a new module with the given name (dotted notation allowed), directly from the source string. + Adds it to sys.modules, and returns the new module object. + If you provide a namespace dict (such as ``globals()``), it will import the module into that namespace too. + """ + path = "" + components = name.split(".") + module = types.ModuleType("pyro-flame-module-context") + for component in components: + path += "." 
+ component + real_path = path[1[:None]] + if real_path in sys.modules: + module = sys.modules[real_path] + else: + setattr(module, component, types.ModuleType(real_path)) + module = getattr(module, component) + sys.modules[real_path] = module + + exec_function(source, filename, module.__dict__) + if namespace is not None: + namespace[components[0]] = __import__(name) + return module + + +def start(daemon): + """ + Create and register a Flame server in the given daemon. + Be *very* cautious before starting this: it allows the clients full access to everything on your system. + """ + if config.FLAME_ENABLED: + if set(config.SERIALIZERS_ACCEPTED) != {"pickle"}: + raise errors.SerializeError("Flame requires the pickle serializer exclusively") + return daemon.register(Flame(), constants.FLAME_NAME) + raise errors.SecurityError("Flame is disabled in the server configuration") + + +def connect(location, hmac_key=None): + """ + Connect to a Flame server on the given location, for instance localhost:9999 or ./u:unixsock + This is just a convenience function to creates an appropriate Pyro proxy. 
+ """ + if config.SERIALIZER != "pickle": + raise errors.SerializeError("Flame requires the pickle serializer") + proxy = core.Proxy("PYRO:%s@%s" % (constants.FLAME_NAME, location)) + if hmac_key: + proxy._pyroHmacKey = hmac_key + proxy._pyroBind() + return proxy diff --git a/APPS_UNCOMPILED/lib/Pyro4/utils/flameserver.py b/APPS_UNCOMPILED/lib/Pyro4/utils/flameserver.py new file mode 100644 index 0000000..a4de853 --- /dev/null +++ b/APPS_UNCOMPILED/lib/Pyro4/utils/flameserver.py @@ -0,0 +1,68 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/Pyro4/utils/flameserver.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 2593 bytes +""" +Pyro FLAME: Foreign Location Automatic Module Exposer. +Easy but potentially very dangerous way of exposing remote modules and builtins. +This is the commandline server. + +You can start this module as a script from the command line, to easily get a +flame server running: + + :command:`python -m Pyro4.utils.flameserver` + or simply: :command:`pyro4-flameserver` + +You have to explicitly enable Flame first though by setting the FLAME_ENABLED config item. + +Pyro - Python Remote Objects. Copyright by Irmen de Jong (irmen@razorvine.net). 
+""" +from __future__ import print_function +import sys, os, warnings +from Pyro4.configuration import config +from Pyro4 import core +from Pyro4.utils import flame + +def main(args=None, returnWithoutLooping=False): + from optparse import OptionParser + parser = OptionParser() + parser.add_option("-H", "--host", default="localhost", help="hostname to bind server on (default=%default)") + parser.add_option("-p", "--port", type="int", default=0, help="port to bind server on") + parser.add_option("-u", "--unixsocket", help="Unix domain socket name to bind server on") + parser.add_option("-q", "--quiet", action="store_true", default=False, help="don't output anything") + parser.add_option("-k", "--key", help="the HMAC key to use (deprecated)") + options, args = parser.parse_args(args) + if options.key: + warnings.warn("using -k to supply HMAC key on the command line is a security problem and is deprecated since Pyro 4.72. See the documentation for an alternative.") + if "PYRO_HMAC_KEY" in os.environ: + if options.key: + raise SystemExit("error: don't use -k and PYRO_HMAC_KEY at the same time") + options.key = os.environ["PYRO_HMAC_KEY"] + if not options.quiet: + print("Starting Pyro Flame server.") + hmac = (options.key or "").encode("utf-8") + if not hmac: + if not options.quiet: + print("Warning: HMAC key not set. 
Anyone can connect to this server!") + config.SERIALIZERS_ACCEPTED = { + "pickle"} + daemon = core.Daemon(host=(options.host), port=(options.port), unixsocket=(options.unixsocket)) + if hmac: + daemon._pyroHmacKey = hmac + uri = flame.start(daemon) + if not options.quiet: + print("server uri: %s" % uri) + print("server is running.") + if returnWithoutLooping: + return ( + daemon, uri) + daemon.requestLoop() + daemon.close() + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/APPS_UNCOMPILED/lib/Pyro4/utils/httpgateway.py b/APPS_UNCOMPILED/lib/Pyro4/utils/httpgateway.py new file mode 100644 index 0000000..03ad441 --- /dev/null +++ b/APPS_UNCOMPILED/lib/Pyro4/utils/httpgateway.py @@ -0,0 +1,276 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/Pyro4/utils/httpgateway.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 16750 bytes +""" +HTTP gateway: connects the web browser's world of javascript+http and Pyro. +Creates a stateless HTTP server that essentially is a proxy for the Pyro objects behind it. +It exposes the Pyro objects through a HTTP interface and uses the JSON serializer, +so that you can immediately process the response data in the browser. + +You can start this module as a script from the command line, to easily get a +http gateway server running: + + :command:`python -m Pyro4.utils.httpgateway` + or simply: :command:`pyro4-httpgateway` + +It is also possible to import the 'pyro_app' function and stick that into a WSGI +server of your choice, to have more control. + +The javascript code in the web page of the gateway server works with the same-origin +browser policy because it is served by the gateway itself. 
If you want to access it +from scripts in different sites, you have to work around this or embed the gateway app +in your site. Non-browser clients that access the http api have no problems. +See the `http` example for two of such clients (node.js and python). + +Pyro - Python Remote Objects. Copyright by Irmen de Jong (irmen@razorvine.net). +""" +from __future__ import print_function +import sys, re, cgi, os, uuid, warnings +from wsgiref.simple_server import make_server +import traceback +from Pyro4.util import json +from Pyro4.configuration import config +from Pyro4 import constants, errors, core, message, util, naming +__all__ = [ + "pyro_app", "main"] +_nameserver = None + +def get_nameserver(hmac=None): + global _nameserver + if not _nameserver: + _nameserver = naming.locateNS(hmac_key=hmac) + try: + _nameserver.ping() + return _nameserver + except errors.ConnectionClosedError: + _nameserver = None + print("Connection with nameserver lost, reconnecting...") + return get_nameserver(hmac) + + +def invalid_request(start_response): + """Called if invalid http method.""" + start_response("405 Method Not Allowed", [('Content-Type', 'text/plain')]) + return [b'Error 405: Method Not Allowed'] + + +def not_found(start_response): + """Called if Url not found.""" + start_response("404 Not Found", [('Content-Type', 'text/plain')]) + return [b'Error 404: Not Found'] + + +def redirect(start_response, target): + """Called to do a redirect""" + start_response("302 Found", [("Location", target)]) + return [] + + +index_page_template = '\n\n\n Pyro HTTP gateway\n \n\n\n \n \n\n
\n

Pyro HTTP gateway

\n

\n Use http+json to talk to Pyro objects.\n Docs.\n

\n
\n

Note: performance isn\'t a key concern here; it is a stateless server.\n It does a name lookup and uses a new Pyro proxy for each request.

\n

Currently exposed contents of name server on {hostname}:

\n

(Limited to 10 entries, exposed name pattern = \'{ns_regex}\')

\n{name_server_contents_list}\n

Name server examples: (these examples are working if you expose the Pyro.NameServer object)

\n\n

Echoserver examples: (these examples are working if you expose the test.echoserver object)

\n\n

Pyro response data (via Ajax):

\nCall:
   
\nResponse:
   
\n

Pyro version: {pyro_version} — © Irmen de Jong

def return_homepage(environ, start_response):
    """
    Render the gateway's HTML index page: a listing of up to 10 name server
    entries matching the exposed-name regex, plus usage examples.
    """
    try:
        nameserver = get_nameserver(hmac=(pyro_app.hmac_key))
    except errors.NamingError as x:
        print("Name server error:", x)
        start_response("500 Internal Server Error", [('Content-Type', 'text/plain')])
        return [b'Cannot connect to the Pyro name server. Is it running? Refresh page to retry.']
    start_response("200 OK", [('Content-Type', 'text/html')])
    # NOTE(review): the HTML fragments in this function appear stripped of
    # their markup by the decompile step; restore the table/anchor templates
    # from upstream Pyro4's httpgateway.py before shipping.
    nslist = [""]
    # BUG FIX: the decompiled bytecode read list(...)[None[:10]] which raises
    # TypeError; the page itself says "Limited to 10 entries", so the intended
    # expression is a [:10] slice.
    names = sorted(list(nameserver.list(regex=(pyro_app.ns_regex)).keys())[:10])
    with core.batch(nameserver) as nsbatch:
        for name in names:
            nsbatch.lookup(name)
    for name, uri in zip(names, nsbatch()):
        attributes = "-"
        try:
            with core.Proxy(uri) as proxy:
                proxy._pyroHmacKey = pyro_app.hmac_key
                proxy._pyroBind()
                methods = "   ".join(proxy._pyroMethods) or "-"
                attributes = ['{attribute}'.format(name=name, attribute=attribute)
                              for attribute in proxy._pyroAttrs]
                attributes = "   ".join(attributes) or "-"
        except errors.PyroError as x:
            # metadata lookup failed for this entry: log it and show the error inline
            stderr = environ["wsgi.errors"]
            print("ERROR getting metadata for {0}:".format(uri), file=stderr)
            traceback.print_exc(file=stderr)
            methods = "??error:%s??" % str(x)
        nslist.append(''.format(name=name, methods=methods, attributes=attributes))
    nslist.append("")
    index_page = index_page_template.format(ns_regex=(pyro_app.ns_regex),
                                            name_server_contents_list=("".join(nslist)),
                                            pyro_version=(constants.VERSION),
                                            hostname=(nameserver._pyroUri.location))
    return [index_page.encode("utf-8")]


def process_pyro_request(environ, path, parameters, start_response):
    """
    Handle one gateway request: look up the object in the name server, call
    the requested method/attribute through a fresh proxy, and return the raw
    JSON wire response.
    """
    pyro_options = environ.get("HTTP_X_PYRO_OPTIONS", "").split(",")
    if not path:
        return return_homepage(environ, start_response)
    # BUG FIX: the decompiled bytecode had "return matches or not_found(...)",
    # which returned the match object itself and made everything below
    # unreachable; the intended logic is a guard that 404s on a non-match.
    matches = re.match(r"(.+)/(.+)", path)
    if not matches:
        return not_found(start_response)
    object_name, method = matches.groups()
    if pyro_app.gateway_key:
        gateway_key = environ.get("HTTP_X_PYRO_GATEWAY_KEY", "") or parameters.get("$key", "")
        gateway_key = gateway_key.encode("utf-8")
        if gateway_key != pyro_app.gateway_key:
            start_response("403 Forbidden", [('Content-Type', 'text/plain')])
            return [b'403 Forbidden - incorrect gateway api key']
        if "$key" in parameters:
            del parameters["$key"]
    if pyro_app.ns_regex:
        if not re.match(pyro_app.ns_regex, object_name):
            start_response("403 Forbidden", [('Content-Type', 'text/plain')])
            return [b'403 Forbidden - access to the requested object has been denied']
    try:
        nameserver = get_nameserver(hmac=(pyro_app.hmac_key))
        uri = nameserver.lookup(object_name)
        with core.Proxy(uri) as proxy:
            # propagate (or create) a correlation id for tracing
            header_corr_id = environ.get("HTTP_X_PYRO_CORRELATION_ID", "")
            if header_corr_id:
                core.current_context.correlation_id = uuid.UUID(header_corr_id)
            else:
                core.current_context.correlation_id = uuid.uuid4()
            proxy._pyroHmacKey = pyro_app.hmac_key
            proxy._pyroGetMetadata()
            if "oneway" in pyro_options:
                proxy._pyroOneway.add(method)
            # BUG FIX: this was "elif" in the decompiled output, which would
            # skip the $meta handling whenever the oneway option was present;
            # the two conditions are independent.
            if method == "$meta":
                result = {'methods': tuple(proxy._pyroMethods),
                          'attrs': tuple(proxy._pyroAttrs)} if False else \
                         {'methods': tuple(proxy._pyroMethods),
                          'attributes': tuple(proxy._pyroAttrs)}
                reply = json.dumps(result).encode("utf-8")
                start_response("200 OK", [('Content-Type', 'application/json; charset=utf-8'),
                                          ("X-Pyro-Correlation-Id", str(core.current_context.correlation_id))])
                return [reply]
            # ask the proxy for the raw wire message so we can forward the
            # server's JSON payload untouched
            proxy._pyroRawWireResponse = True
            if method in proxy._pyroAttrs:
                if parameters:
                    raise AssertionError("attribute lookup can't have query parameters")
                msg = getattr(proxy, method)
            else:
                msg = getattr(proxy, method)(**parameters)
            if msg is None or "oneway" in pyro_options:
                # oneway calls (and released attribute reads) have no response data
                start_response("200 OK", [('Content-Type', 'application/json; charset=utf-8'),
                                          ("X-Pyro-Correlation-Id", str(core.current_context.correlation_id))])
                return []
            if msg.flags & message.FLAGS_EXCEPTION:
                # forward the serialized remote exception as a server error
                start_response("500 Internal Server Error", [('Content-Type', 'application/json; charset=utf-8')])
                return [msg.data]
            start_response("200 OK", [('Content-Type', 'application/json; charset=utf-8'),
                                      ("X-Pyro-Correlation-Id", str(core.current_context.correlation_id))])
            return [msg.data]
    except Exception as x:
        # any other failure: log the traceback and return it serialized as JSON
        stderr = environ["wsgi.errors"]
        print("ERROR handling {0} with params {1}:".format(path, parameters), file=stderr)
        traceback.print_exc(file=stderr)
        start_response("500 Internal Server Error", [('Content-Type', 'application/json; charset=utf-8')])
        reply = json.dumps(util.SerializerBase.class_to_dict(x)).encode("utf-8")
        return [reply]
def pyro_app(environ, start_response):
    """
    The WSGI app function that is used to process the requests.
    You can stick this into a wsgi server of your choice, or use the main() method
    to use the default wsgiref server.
    """
    config.SERIALIZER = "json"
    config.COMMTIMEOUT = pyro_app.comm_timeout
    method = environ.get("REQUEST_METHOD")
    path = environ.get("PATH_INFO", "").lstrip("/")
    if not path:
        return redirect(start_response, "/pyro/")
    if path.startswith("pyro/"):
        if method in ('GET', 'POST'):
            parameters = singlyfy_parameters(cgi.parse(environ["wsgi.input"], environ))
            # BUG FIX: the decompiled bytecode read path[5[:None]], which raises
            # TypeError; the intent is to strip the leading "pyro/" prefix.
            return process_pyro_request(environ, path[5:], parameters, start_response)
        return invalid_request(start_response)
    return not_found(start_response)


def singlyfy_parameters(parameters):
    """
    Makes a cgi-parsed parameter dictionary into a dict where the values that
    are just a list of a single value, are converted to just that single value.
    """
    for key, value in parameters.items():
        if isinstance(value, (list, tuple)) and len(value) == 1:
            parameters[key] = value[0]
    return parameters


# gateway configuration, attached to the WSGI callable itself (set by main())
pyro_app.ns_regex = "http\\."
pyro_app.hmac_key = None
pyro_app.gateway_key = None
pyro_app.comm_timeout = config.COMMTIMEOUT


def main(args=None):
    """Command-line entry point: parse options and serve pyro_app via wsgiref."""
    from optparse import OptionParser
    parser = OptionParser()
    parser.add_option("-H", "--host", default="localhost", help="hostname to bind server on (default=%default)")
    parser.add_option("-p", "--port", type="int", default=8080, help="port to bind server on (default=%default)")
    parser.add_option("-e", "--expose", default=(pyro_app.ns_regex), help="a regex of object names to expose (default=%default)")
    parser.add_option("-k", "--pyrokey", help="the HMAC key to use to connect with Pyro (deprecated)")
    parser.add_option("-g", "--gatewaykey", help="the api key to use to connect to the gateway itself")
    parser.add_option("-t", "--timeout", type="float", default=(pyro_app.comm_timeout), help="Pyro timeout value to use (COMMTIMEOUT setting, default=%default)")
    options, args = parser.parse_args(args)
    if options.pyrokey or options.gatewaykey:
        warnings.warn("using -k and/or -g to supply keys on the command line is a security problem and is deprecated since Pyro 4.72. See the documentation for an alternative.")
    if "PYRO_HMAC_KEY" in os.environ:
        if options.pyrokey:
            raise SystemExit("error: don't use -k and PYRO_HMAC_KEY at the same time")
        options.pyrokey = os.environ["PYRO_HMAC_KEY"]
    # BUG FIX: this was "elif" in the decompiled output, which silently ignored
    # PYRO_HTTPGATEWAY_KEY whenever PYRO_HMAC_KEY was also set; the two
    # environment variables are independent settings.
    if "PYRO_HTTPGATEWAY_KEY" in os.environ:
        if options.gatewaykey:
            raise SystemExit("error: don't use -g and PYRO_HTTPGATEWAY_KEY at the same time")
        options.gatewaykey = os.environ["PYRO_HTTPGATEWAY_KEY"]
    pyro_app.hmac_key = (options.pyrokey or "").encode("utf-8")
    pyro_app.gateway_key = (options.gatewaykey or "").encode("utf-8")
    pyro_app.ns_regex = options.expose
    pyro_app.comm_timeout = config.COMMTIMEOUT = options.timeout
    if pyro_app.ns_regex:
        print("Exposing objects with names matching: ", pyro_app.ns_regex)
    else:
        print("Warning: exposing all objects (no expose regex set)")
    try:
        ns = get_nameserver(hmac=(pyro_app.hmac_key))
    except errors.PyroError:
        print("Not yet connected to a name server.")
    else:
        print("Connected to name server at: ", ns._pyroUri)
    server = make_server(options.host, options.port, pyro_app)
    print("Pyro HTTP gateway running on http://{0}:{1}/pyro/".format(*server.socket.getsockname()))
    server.serve_forever()
    server.server_close()
    return 0
import isclass, isgenerator, isroutine +import logging, os, platform, sys +from types import FunctionType +import warnings +try: + _py_impl = platform.python_implementation() +except: + _py_impl = "Python" + +_is_jython = _py_impl == "Jython" and getattr(sys, "JYTHON_JAR", None) is not None +try: + import clr + clr.AddReference("System") + _has_clr = True +except: + _has_clr = False + +_is_ironpython = _py_impl == "IronPython" and _has_clr +__all__ = [ + "TRACE", + "logged", + "traced", + "install_traced_noop"] +TRACE = 1 +logging.addLevelName(TRACE, "TRACE") + +def logged(obj): + """Add a logger member to a decorated class or function. + + :arg obj: + the class or function object being decorated, or an optional + :class:`logging.Logger` object to be used as the parent logger + (instead of the default module-named logger) + :return: + *obj* if *obj* is a class or function; otherwise, if *obj* is a + logger, return a lambda decorator that will in turn set the + logger attribute and return *obj* + + If *obj* is a :obj:`class`, then ``obj.__log`` will have the logger + name ".": + + >>> import sys + >>> logging.basicConfig( + ... level=logging.DEBUG, stream=sys.stdout, + ... format="%(levelname)s:%(name)s:%(funcName)s:%(message)s") + >>> @logged + ... class Sample: + ... + ... def test(self): + ... self.__log.debug("This is a test.") + ... + >>> Sample().test() + DEBUG:autologging.Sample:test:This is a test. + + .. note:: + Autologging will prefer to use the class's ``__qualname__`` when + it is available (Python 3.3+). Otherwise, the class's + ``__name__`` is used. For example:: + + class Outer: + + @logged + class Nested: pass + + Under Python 3.3+, ``Nested.__log`` will have the name + "autologging.Outer.Nested", while under Python 2.7 or 3.2, the + logger name will be "autologging.Nested". + + .. versionchanged:: 0.4.0 + Functions decorated with ``@logged`` use a *single* underscore + in the logger variable name (e.g. 
``my_function._log``) rather + than a double underscore. + + If *obj* is a function, then ``obj._log`` will have the logger name + "": + + >>> import sys + >>> logging.basicConfig( + ... level=logging.DEBUG, stream=sys.stdout, + ... format="%(levelname)s:%(name)s:%(funcName)s:%(message)s") + >>> @logged + ... def test(): + ... test._log.debug("This is a test.") + ... + >>> test() + DEBUG:autologging:test:This is a test. + + .. note:: + Within a logged function, the ``_log`` attribute must be + qualified by the function name. + + If *obj* is a :class:`logging.Logger` object, then that logger is + used as the parent logger (instead of the default module-named + logger): + + >>> import sys + >>> logging.basicConfig( + ... level=logging.DEBUG, stream=sys.stdout, + ... format="%(levelname)s:%(name)s:%(funcName)s:%(message)s") + >>> @logged(logging.getLogger("test.parent")) + ... class Sample: + ... def test(self): + ... self.__log.debug("This is a test.") + ... + >>> Sample().test() + DEBUG:test.parent.Sample:test:This is a test. + + Again, functions are similar: + + >>> import sys + >>> logging.basicConfig( + ... level=logging.DEBUG, stream=sys.stdout, + ... format="%(levelname)s:%(name)s:%(funcName)s:%(message)s") + >>> @logged(logging.getLogger("test.parent")) + ... def test(): + ... test._log.debug("This is a test.") + ... + >>> test() + DEBUG:test.parent:test:This is a test. + + .. note:: + For classes, the logger member is made "private" (i.e. ``__log`` + with double underscore) to ensure that log messages that include + the *%(name)s* format placeholder are written with the correct + name. + + Consider a subclass of a ``@logged``-decorated parent class. If + the subclass were **not** decorated with ``@logged`` and could + access the parent's logger member directly to make logging + calls, those log messages would display the name of the + parent class, not the subclass. 
+ + Therefore, subclasses of a ``@logged``-decorated parent class + that wish to use a provided ``self.__log`` object must themselves + be decorated with ``@logged``. + + .. warning:: + Although the ``@logged`` and ``@traced`` decorators will "do the + right thing" regardless of the order in which they are applied to + the same function, it is recommended that ``@logged`` always be + used as the innermost decorator:: + + @traced + @logged + def my_function(): + my_function._log.info("message") + + This is because ``@logged`` simply sets the ``_log`` attribute + and then returns the original function, making it "safe" to use + in combination with any other decorator. + + .. note:: + Both `Jython `_ and + `IronPython `_ report an "internal" class + name using its mangled form, which will be reflected in the + default logger name. + + For example, in the sample code below, both Jython and IronPython + will use the default logger name "autologging._Outer__Nested" + (whereas CPython/PyPy/Stackless would use "autologging.__Nested" + under Python 2 or "autologging.Outer.__Nested" under Python 3.3+) + :: + + class Outer: + @logged + class __Nested: + pass + + .. warning:: + `IronPython `_ does not fully support + frames (even with the -X:FullFrames option), so you are likely to + see things like misreported line numbers and "" in + log records emitted when running under IronPython. + + """ + if isinstance(obj, logging.Logger): + return (lambda class_or_fn: _add_logger_to(class_or_fn, + logger_name=_generate_logger_name(class_or_fn, + parent_name=(obj.name)))) + return _add_logger_to(obj) + + +def traced(*args, **keywords): + """Add call and return tracing to an unbound function or to the + methods of a class. + + The arguments to ``traced`` differ depending on whether it is being + used to trace an unbound function or the methods of a class: + + .. 
rubric:: Trace an unbound function using the default logger + + :arg func: the unbound function to be traced + + By default, a logger named for the function's module is used: + + >>> import sys + >>> logging.basicConfig( + ... level=TRACE, stream=sys.stdout, + ... format="%(levelname)s:%(name)s:%(funcName)s:%(message)s") + >>> @traced + ... def func(x, y): + ... return x + y + ... + >>> func(7, 9) + TRACE:autologging:func:CALL *(7, 9) **{} + TRACE:autologging:func:RETURN 16 + 16 + + .. rubric:: Trace an unbound function using a named logger + + :arg logging.Logger logger: + the parent logger used to trace the unbound function + + >>> import sys + >>> logging.basicConfig( + ... level=TRACE, stream=sys.stdout, + ... format="%(levelname)s:%(name)s:%(funcName)s:%(message)s") + >>> @traced(logging.getLogger("my.channel")) + ... def func(x, y): + ... return x + y + ... + >>> func(7, 9) + TRACE:my.channel:func:CALL *(7, 9) **{} + TRACE:my.channel:func:RETURN 16 + 16 + + .. rubric:: Trace default methods using the default logger + + :arg class_: the class whose methods will be traced + + By default, all "public", "_nonpublic", and "__internal" methods, as + well as the special "__init__" and "__call__" methods, will be + traced. Tracing log entries will be written to a logger named for + the module and class: + + >>> import sys + >>> logging.basicConfig( + ... level=TRACE, stream=sys.stdout, + ... format="%(levelname)s:%(name)s:%(funcName)s:%(message)s") + >>> @traced + ... class Class: + ... def __init__(self, x): + ... self._x = x + ... def public(self, y): + ... return self._x + y + ... def _nonpublic(self, y): + ... return self._x - y + ... def __internal(self, y=2): + ... return self._x ** y + ... def __repr__(self): + ... return "Class(%r)" % self._x + ... def __call__(self): + ... return self._x + ... 
+ >>> obj = Class(7) + TRACE:autologging.Class:__init__:CALL *(7,) **{} + >>> obj.public(9) + TRACE:autologging.Class:public:CALL *(9,) **{} + TRACE:autologging.Class:public:RETURN 16 + 16 + >>> obj._nonpublic(5) + TRACE:autologging.Class:_nonpublic:CALL *(5,) **{} + TRACE:autologging.Class:_nonpublic:RETURN 2 + 2 + >>> obj._Class__internal(y=3) + TRACE:autologging.Class:__internal:CALL *() **{'y': 3} + TRACE:autologging.Class:__internal:RETURN 343 + 343 + >>> repr(obj) # not traced by default + 'Class(7)' + >>> obj() + TRACE:autologging.Class:__call__:CALL *() **{} + TRACE:autologging.Class:__call__:RETURN 7 + 7 + + .. note:: + When the runtime Python version is >= 3.3, the *qualified* class + name will be used to name the tracing logger (i.e. a nested class + will write tracing log entries to a logger named + "module.Parent.Nested"). + + .. rubric:: Trace default methods using a named logger + + :arg logging.Logger logger: + the parent logger used to trace the methods of the class + + By default, all "public", "_nonpublic", and "__internal" methods, as + well as the special "__init__" method, will be traced. Tracing log + entries will be written to the specified logger: + + >>> import sys + >>> logging.basicConfig( + ... level=TRACE, stream=sys.stdout, + ... format="%(levelname)s:%(name)s:%(funcName)s:%(message)s") + >>> @traced(logging.getLogger("my.channel")) + ... class Class: + ... def __init__(self, x): + ... self._x = x + ... def public(self, y): + ... return self._x + y + ... def _nonpublic(self, y): + ... return self._x - y + ... def __internal(self, y=2): + ... return self._x ** y + ... def __repr__(self): + ... return "Class(%r)" % self._x + ... def __call__(self): + ... return self._x + ... 
+ >>> obj = Class(7) + TRACE:my.channel.Class:__init__:CALL *(7,) **{} + >>> obj.public(9) + TRACE:my.channel.Class:public:CALL *(9,) **{} + TRACE:my.channel.Class:public:RETURN 16 + 16 + >>> obj._nonpublic(5) + TRACE:my.channel.Class:_nonpublic:CALL *(5,) **{} + TRACE:my.channel.Class:_nonpublic:RETURN 2 + 2 + >>> obj._Class__internal(y=3) + TRACE:my.channel.Class:__internal:CALL *() **{'y': 3} + TRACE:my.channel.Class:__internal:RETURN 343 + 343 + >>> repr(obj) # not traced by default + 'Class(7)' + >>> obj() + TRACE:my.channel.Class:__call__:CALL *() **{} + TRACE:my.channel.Class:__call__:RETURN 7 + 7 + + .. rubric:: Trace specified methods using the default logger + + :arg tuple method_names: + the names of the methods that will be traced + + Tracing log entries will be written to a logger named for the + module and class: + + >>> import sys + >>> logging.basicConfig( + ... level=TRACE, stream=sys.stdout, + ... format="%(levelname)s:%(name)s:%(funcName)s:%(message)s") + >>> @traced("public", "__internal") + ... class Class: + ... def __init__(self, x): + ... self._x = x + ... def public(self, y): + ... return self._x + y + ... def _nonpublic(self, y): + ... return self._x - y + ... def __internal(self, y=2): + ... return self._x ** y + ... def __repr__(self): + ... return "Class(%r)" % self._x + ... def __call__(self): + ... return self._x + ... + >>> obj = Class(7) + >>> obj.public(9) + TRACE:autologging.Class:public:CALL *(9,) **{} + TRACE:autologging.Class:public:RETURN 16 + 16 + >>> obj._nonpublic(5) + 2 + >>> obj._Class__internal(y=3) + TRACE:autologging.Class:__internal:CALL *() **{'y': 3} + TRACE:autologging.Class:__internal:RETURN 343 + 343 + >>> repr(obj) + 'Class(7)' + >>> obj() + 7 + + .. warning:: + When method names are specified explicitly via *args*, + Autologging ensures that each method is actually defined in + the body of the class being traced. 
(This means that inherited + methods that are not overridden are **never** traced, even if + they are named explicitly in *args*.) + + If a defintion for any named method is not found in the class + body, either because the method is inherited or because the + name is misspelled, Autologging will issue a :exc:`UserWarning`. + + If you wish to trace a method from a super class, you have two + options: + + 1. Use ``traced`` to decorate the super class. + 2. Override the method and trace it in the subclass. + + .. note:: + When the runtime Python version is >= 3.3, the *qualified* class + name will be used to name the tracing logger (i.e. a nested class + will write tracing log entries to a logger named + "module.Parent.Nested"). + + .. rubric:: Trace specified methods using a named logger + + :arg logging.Logger logger: + the parent logger used to trace the methods of the class + :arg tuple method_names: + the names of the methods that will be traced + + >>> import sys + >>> logging.basicConfig( + ... level=TRACE, stream=sys.stdout, + ... format="%(levelname)s:%(name)s:%(funcName)s:%(message)s") + >>> @traced(logging.getLogger("my.channel"), "public", "__internal") + ... class Class: + ... def __init__(self, x): + ... self._x = x + ... def public(self, y): + ... return self._x + y + ... def _nonpublic(self, y): + ... return self._x - y + ... def __internal(self, y=2): + ... return self._x ** y + ... def __repr__(self): + ... return "Class(%r)" % self._x + ... def __call__(self): + ... return self._x + ... + >>> obj = Class(7) + >>> obj.public(9) + TRACE:my.channel.Class:public:CALL *(9,) **{} + TRACE:my.channel.Class:public:RETURN 16 + 16 + >>> obj._nonpublic(5) + 2 + >>> obj._Class__internal(y=3) + TRACE:my.channel.Class:__internal:CALL *() **{'y': 3} + TRACE:my.channel.Class:__internal:RETURN 343 + 343 + >>> repr(obj) # not traced by default + 'Class(7)' + >>> obj() + 7 + + .. 
warning:: + When method names are specified explicitly via *args*, + Autologging ensures that each method is actually defined in + the body of the class being traced. (This means that inherited + methods that are not overridden are **never** traced, even if + they are named explicitly in *args*.) + + If a defintion for any named method is not found in the class + body, either because the method is inherited or because the + name is misspelled, Autologging will issue a :exc:`UserWarning`. + + If you wish to trace a method from a super class, you have two + options: + + 1. Use ``traced`` to decorate the super class. + 2. Override the method and trace it in the subclass. + + .. rubric:: Exclude specified methods from tracing + + .. versionadded:: 1.3.0 + + :arg tuple method_names: + the names of the methods that will be excluded from tracing + :keyword bool exclude: + ``True`` to cause the method names list to be interpreted as + an exclusion list (``False`` is the default, and causes the named + methods to be **included** as described above) + + The example below demonstrates exclusions using the default logger. + + >>> import sys + >>> logging.basicConfig( + ... level=TRACE, stream=sys.stdout, + ... format="%(levelname)s:%(name)s:%(funcName)s:%(message)s") + >>> @traced("_nonpublic", "__internal", exclude=True) + ... class Class: + ... def __init__(self, x): + ... self._x = x + ... def public(self, y): + ... return self._x + y + ... def _nonpublic(self, y): + ... return self._x - y + ... def __internal(self, y=2): + ... return self._x ** y + ... def __repr__(self): + ... return "Class(%r)" % self._x + ... def __call__(self): + ... return self._x + ... 
+ >>> obj = Class(7) + >>> obj.public(9) + TRACE:autologging.Class:public:CALL *(9,) **{} + TRACE:autologging.Class:public:RETURN 16 + 16 + >>> obj._nonpublic(5) + 2 + >>> obj._Class__internal(y=3) + 343 + >>> repr(obj) + 'Class(7)' + >>> obj() + TRACE:autologging.Class:__call__:CALL *() **{} + TRACE:autologging.Class:__call__:RETURN 7 + 7 + + When method names are excluded via *args* and the *exclude* keyword, + Autologging **ignores** methods that are not actually defined in the + body of the class being traced. + + .. warning:: + If an exclusion list causes the list of traceable methods to + resolve empty, then Autologging will issue a :exc:`UserWarning`. + + .. note:: + When the runtime Python version is >= 3.3, the *qualified* class + name will be used to name the tracing logger (i.e. a nested class + will write tracing log entries to a logger named + "module.Parent.Nested"). + + .. note:: + When tracing a class, if the default (class-named) logger is + used **and** the runtime Python version is >= 3.3, then the + *qualified* class name will be used to name the tracing logger + (i.e. a nested class will write tracing log entries to a logger + named "module.Parent.Nested"). + + .. note:: + If method names are specified when decorating a function, a + :exc:`UserWarning` is issued, but the methods names are ignored + and the function is traced as though the method names had not + been specified. + + .. note:: + Both `Jython `_ and + `IronPython `_ report an "internal" class + name using its mangled form, which will be reflected in the + default tracing logger name. + + For example, in the sample code below, both Jython and IronPython + will use the default tracing logger name + "autologging._Outer__Nested" (whereas CPython/PyPy/Stackless + would use "autologging.__Nested" under Python 2 or + "autologging.Outer.__Nested" under Python 3.3+):: + + class Outer: + @traced + class __Nested: + pass + + .. 
warning:: + Neither `Jython `_ nor + `IronPython `_ currently implement the + ``function.__code__.co_lnotab`` attribute, so the last line + number of a function cannot be determined by Autologging. + + .. versionchanged:: 1.3.1 + Due to unavoidable inconsistencies in line number tracking across + Python variants (see + `issues/6 `_, as + of version 1.3.1 and until further notice Autologging will only + record the first line number of the function being traced in all + tracing CALL and RETURN records. + (Note that YIELD tracing records for generator iterators will + continue to record the correct line number on variants other than + IronPython.) + + """ + global traced + obj = args[0] if args else None + if obj is None: + return traced + if isclass(obj): + return _install_traceable_methods(obj, exclude=(keywords.get("exclude", False))) + if isroutine(obj): + return _make_traceable_function(obj, logging.getLogger(_generate_logger_name(obj))) + if isinstance(obj, logging.Logger): + # remaining positional args are method names; decompiler emitted args[1[:None]] (TypeError at runtime) — corrected to args[1:] + method_names = args[1:] + exclude = keywords.get("exclude", False) + + def traced_decorator(class_or_fn): + if isclass(class_or_fn): + return _install_traceable_methods( + class_or_fn, *method_names, **{'exclude':exclude, 'logger':(logging.getLogger)(_generate_logger_name(class_or_fn, + parent_name=(obj.name)))}) + if method_names: + warnings.warn("ignoring method names for @traced function %s.%s" % ( + class_or_fn.__module__, class_or_fn.__name__)) + return _make_traceable_function(class_or_fn, obj) + + return traced_decorator + # all positional args are method names here; decompiler emitted args[None[:None]] (TypeError at runtime) — corrected to args[:] + method_names = args[:] + exclude = keywords.get("exclude", False) + return (lambda class_: _install_traceable_methods(class_, *method_names, **{"exclude": exclude})) + + +__traced_original = traced + +def _traced_noop(*args, **keywords): + """Turn the ``@traced`` decorator into a no-op.""" + obj = args[0] if args else None + if obj is None: + return _traced_noop + if isclass(obj) or isroutine(obj): + return obj + + def 
traced_noop_decorator(class_or_fn): + return class_or_fn + + return traced_noop_decorator + + +def install_traced_noop(): + """Replace the :func:`traced` decorator with an identity (no-op) + decorator. + + Although the overhead of a ``@traced`` function or method is minimal + when the :data:`TRACED` log level is disabled, there is still *some* + overhead (the logging level check, an extra function call). + + If you would like to completely *eliminate* this overhead, call this + function **before** any classes or functions in your application are + decorated with ``@traced``. The :func:`traced` decorator will be + replaced with a no-op decorator that simply returns the class or + function unmodified. + + .. note:: + The **recommended** way to install the no-op ``@traced`` + decorator is to set the ``AUTOLOGGING_TRACED_NOOP`` + environment variable to any non-empty value. + + If the ``AUTOLOGGING_TRACED_NOOP`` environment variable is + set to a non-empty value when Autologging is loaded, the + ``@traced`` no-op will be installed automatically. + + As an alternative to setting the ``AUTOLOGGING_TRACED_NOOP`` + environment variable, you can also call this function directly in + your application's bootstrap module. For example:: + + import autologging + + if running_in_production: + autologging.install_traced_noop() + + .. warning:: + This function **does not** "revert" any already-``@traced`` class + or function! It simply replaces the ``autologging.traced`` module + reference with a no-op. + + For this reason it is imperative that + ``autologging.install_traced_noop()`` be called **before** the + ``@traced`` decorator has been applied to any class or function + in the application. (This is why the ``AUTOLOGGING_TRACED_NOOP`` + environment variable is the recommended approach for installing + the no-op - it allows Autologging itself to guarantee that the + no-op is installed before any classes or functions are + decorated.) 
+ + """ + global traced + traced = _traced_noop + logging.getLogger().info("autologging.traced no-op is installed") + + +if os.getenv("AUTOLOGGING_TRACED_NOOP"): + install_traced_noop() + +def _generate_logger_name(obj, parent_name=None): + """Generate the logger name (channel) for a class or function. + + :arg obj: a class or function + :keyword str parent_name: the name of *obj*'s parent logger + :rtype: str + + If *parent_name* is not specified, the default is to use *obj*'s + module name. + + """ + parent_logger_name = parent_name if parent_name else obj.__module__ + if isclass(obj): + return "%s.%s" % (parent_logger_name, getattr(obj, "__qualname__", obj.__name__)) + return parent_logger_name + + +def _add_logger_to(obj, logger_name=None): + """Set a :class:`logging.Logger` member on *obj*. + + :arg obj: a class or function object + :keyword str logger_name: the name (channel) of the logger for *obj* + :return: *obj* + + If *obj* is a class, the member will be named "__log". If *obj* is a + function, the member will be named "_log". + + """ + logger = logging.getLogger(logger_name if logger_name else _generate_logger_name(obj)) + if isclass(obj): + setattr(obj, _mangle_name("__log", obj.__name__), logger) + else: + obj._log = logger + return obj + + +def _make_traceable_function(function, logger): + """Create a function that delegates to either a tracing proxy or + the original *function*. + + :arg function: + an unbound, module-level (or nested) function + :arg logging.Logger logger: the tracing logger + :return: + a function that wraps *function* to provide the call and return + tracing support + + If *logger* is not enabled for the :attr:`autologging.TRACE` + level **at the time the returned delegator function is invoked**, + then the original *function* is called instead of the tracing proxy. + + The overhead that a ``@traced`` function incurs when tracing is + **disabled** is: + + * the delegator function call itself + * the ``TRACE`` level check. 
+ + The original *function* is available from the delegator function's + ``__wrapped__`` attribute. + + """ + proxy = _FunctionTracingProxy(function, logger) + + @wraps(function) + def autologging_traced_function_delegator(*args, **keywords): + if logger.isEnabledFor(TRACE): + proxy = autologging_traced_function_delegator._tracing_proxy + return proxy(function, args, keywords) + return function(*args, **keywords) + + autologging_traced_function_delegator._tracing_proxy = proxy + if not hasattr(autologging_traced_function_delegator, "__wrapped__"): + autologging_traced_function_delegator.__wrapped__ = function + autologging_traced_function_delegator.__autologging_traced__ = True + return autologging_traced_function_delegator + + +def _install_traceable_methods(class_, *method_names, **keywords): + """Substitute tracing proxy methods for the methods named in + *method_names* in *class_*'s ``__dict__``. + + :arg class_: + a class being traced + :arg tuple method_names: + the names of the methods to be traced + :keyword logging.Logger logger: + the logger to use for tracing + :keyword bool exclude: + ``True`` to interpret *method_names* as an **exclusion** list + rather than an inclusion list + + If *method_names* is empty and the *exclude* keyword is ``False`` + (the default), then all "public", "_nonpublic", and "__internal" + methods, as well as the special "__init__" and "__call__" methods, + will be traced by default. + + If the *exclude* keyword is ``True``, then the methods that will be + traced are the default methods (as identified above) **MINUS** any + methods named in *method_names*. + + If *logger* is unspecified, a default logger will be used to log + tracing messages. 
+ + """ + logger = keywords.get("logger", logging.getLogger(_generate_logger_name(class_))) + if method_names: + traceable_method_names = _get_traceable_method_names(method_names, class_, + exclude=(keywords.get("exclude", False))) + else: + traceable_method_names = _get_default_traceable_method_names(class_) + for method_name in traceable_method_names: + descriptor = class_.__dict__[method_name] + descriptor_type = type(descriptor) + if descriptor_type is FunctionType: + make_traceable_method = _make_traceable_instancemethod + else: + if descriptor_type is classmethod: + make_traceable_method = _make_traceable_classmethod + else: + if descriptor_type is staticmethod: + make_traceable_method = _make_traceable_staticmethod + else: + warnings.warn("tracing not supported for %r" % descriptor_type) + continue + tracing_proxy_descriptor = make_traceable_method(descriptor, logger) + setattr(class_, method_name, tracing_proxy_descriptor) + + return class_ + + +def _get_traceable_method_names(method_names, class_, **keywords): + """Filter (and possibly mangle) *method_names* so that only method + names actually defined as methods in *cls_dict* remain. + + :arg method_names: + a sequence of names that should identify methods defined in + *class_* + :arg class_: the class being traced + :keyword bool exclude: + ``True`` to interpret *method_names* as an **exclusion** list + rather than an inclusion list + :return: + a sequence of names identifying methods that are defined in + *class_* that will be traced + :rtype: list + + .. warning:: + A :exc:`UserWarning` is issued if any **included** method named + in *method_names* is not actually defined in *class_*; or if the + result of filtering **excluded** methods results in an empty + list. 
+ + """ + exclude = keywords.get("exclude", False) + traceable_method_names = [] + if not keywords.get("exclude", False): + for name in method_names: + mname = name if not _is_internal_name(name) else _mangle_name(name, class_.__name__) + if isroutine(class_.__dict__.get(mname)): + traceable_method_names.append(mname) + else: + warnings.warn("%r does not identify a method defined in %s" % ( + name, class_.__name__)) + + else: + traceable_method_names = [name for name in _get_default_traceable_method_names(class_) if _unmangle_name(name, class_.__name__) not in method_names] + traceable_method_names or warnings.warn("exclude=True with the supplied method names results in NO traceable methods for %s" % class_.__name__) + return traceable_method_names + + +def _get_default_traceable_method_names(class_): + """Return all names in *cls_dict* that identify methods. + + :arg class_: the class being traced + :return: + a sequence of names identifying methods of *class_* that will be + traced + :rtype: list + + """ + default_traceable_method_names = [] + for name, member in class_.__dict__.items(): + if isroutine(member): + if not _is_special_name(name) or name in ('__init__', '__call__'): + default_traceable_method_names.append(name) + + return default_traceable_method_names + + +def _is_internal_name(name): + """Determine whether or not *name* is an "__internal" name. + + :arg str name: a name defined in a class ``__dict__`` + :return: ``True`` if *name* is an "__internal" name, else ``False`` + :rtype: bool + + """ + return name.startswith("__") and not name.endswith("__") + + +def _mangle_name(internal_name, class_name): + """Transform *internal_name* (which is assumed to be an "__internal" + name) into a "_ClassName__internal" name. 
+ + :arg str internal_name: + the assumed-to-be-"__internal" member name + :arg str class_name: + name of the class where *internal_name* is defined + :return: + the transformed "_ClassName__internal" name + :rtype: + str + + """ + return "_%s%s" % (class_name.lstrip("_"), internal_name) + + +def _unmangle_name(mangled_name, class_name): + """Transform *mangled_name* (which is assumed to be a + "_ClassName__internal" name) into an "__internal" name. + + :arg str mangled_name: + a mangled "_ClassName__internal" member name + :arg str class_name: + name of the class where the (unmangled) name is defined + :return: + the transformed "__internal" name + :rtype: + str + + """ + return mangled_name.replace("_%s" % class_name.lstrip("_"), "") + + +def _is_special_name(name): + """Determine whether or not *name* is a "__special__" name. + + :arg str name: a name defined in a class ``__dict__`` + :return: ``True`` if *name* is a "__special__" name, else ``False`` + :rtype: bool + + """ + return name.startswith("__") and name.endswith("__") + + +def _make_traceable_instancemethod(unbound_function, logger): + """Create an unbound function that delegates to either a tracing + proxy or the original *unbound_function*. + + :arg unbound_function: + the unbound function for the instance method being traced + :arg logging.Logger logger: the tracing logger + :return: + an unbound function that wraps *unbound_function* to provide the + call and return tracing support + + If *logger* is not enabled for the :attr:`autologging.TRACE` + level **at the time the returned delegator function is invoked**, + then the method for the original *unbound_function* is called + instead of the tracing proxy. 
+ + The overhead that a ``@traced`` instance method incurs when tracing + is **disabled** is: + + * the delegator function call itself + * binding the original *unbound_function* to the instance + * the ``TRACE`` level check + + The original *unbound_function* is available from the delegator + function's ``__wrapped__`` attribute. + + """ + proxy = _FunctionTracingProxy(unbound_function, logger) + + @wraps(unbound_function) + def autologging_traced_instancemethod_delegator(self_, *args, **keywords): + method = unbound_function.__get__(self_, self_.__class__) + if logger.isEnabledFor(TRACE): + proxy = autologging_traced_instancemethod_delegator._tracing_proxy + return proxy(method, args, keywords) + return method(*args, **keywords) + + autologging_traced_instancemethod_delegator._tracing_proxy = proxy + if not hasattr(autologging_traced_instancemethod_delegator, "__wrapped__"): + autologging_traced_instancemethod_delegator.__wrapped__ = unbound_function + autologging_traced_instancemethod_delegator.__autologging_traced__ = True + return autologging_traced_instancemethod_delegator + + +def _make_traceable_classmethod(method_descriptor, logger): + """Create a method descriptor that delegates to either a tracing + proxy or the original *method_descriptor*. + + :arg method_descriptor: + the method descriptor for the class method being traced + :arg logging.Logger logger: the tracing logger + :return: + a method descriptor that wraps the *method_descriptor* function + to provide the call and return tracing support + + If *logger* is not enabled for the :attr:`autologging.TRACE` + level **at the time the returned delegator method descriptor is + invoked**, then the method for the original *method_descriptor* is + called instead of the tracing proxy. 
+ + The overhead that a ``@traced`` class method incurs when tracing is + **disabled** is: + + * the delegator function call itself + * binding the original *method_descriptor* to the class + * the ``TRACE`` level check + + The original *method_descriptor* function is available from the + delegator method descriptor's ``__func__.__wrapped__`` attribute. + + """ + function = method_descriptor.__func__ + proxy = _FunctionTracingProxy(function, logger) + + @wraps(function) + def autologging_traced_classmethod_delegator(cls, *args, **keywords): + method = method_descriptor.__get__(None, cls) + if logger.isEnabledFor(TRACE): + proxy = autologging_traced_classmethod_delegator._tracing_proxy + return proxy(method, args, keywords) + return method(*args, **keywords) + + autologging_traced_classmethod_delegator._tracing_proxy = proxy + if not hasattr(autologging_traced_classmethod_delegator, "__wrapped__"): + autologging_traced_classmethod_delegator.__wrapped__ = function + autologging_traced_classmethod_delegator.__autologging_traced__ = True + return classmethod(autologging_traced_classmethod_delegator) + + +def _make_traceable_staticmethod(method_descriptor, logger): + """Create a method descriptor that delegates to either a tracing + proxy or the original *method_descriptor*. + + :arg method_descriptor: + the method descriptor for the static method being traced + :arg logging.Logger logger: the tracing logger + :return: + a method descriptor that wraps the *method_descriptor* function + to provide the call and return tracing support + + If *logger* is not enabled for the :attr:`autologging.TRACE` + level **at the time the returned delegator method descriptor is + invoked**, then the method for the original *method_descriptor* is + called instead of the tracing proxy. 
+ + The overhead that a ``@traced`` static method incurs when tracing is + **disabled** is: + + * the delegator function call itself + * the ``TRACE`` level check + + The original *method_descriptor* function is available from the + delegator method descriptor's ``__func__.__wrapped__`` attribute. + + """ + autologging_traced_staticmethod_delegator = _make_traceable_function(method_descriptor.__func__, logger) + return staticmethod(autologging_traced_staticmethod_delegator) + + +class _FunctionTracingProxy(object): + __doc__ = "Proxy a function invocation to capture and log the call arguments\n and return value.\n\n " + + def __init__(self, function, logger): + """ + :arg function: the function being traced + :arg logging.Logger logger: the tracing logger + + """ + func_code = function.__code__ + self._func_filename = func_code.co_filename + self._func_lineno = func_code.co_firstlineno + self._logger = logger + + @property + def logger(self): + """The tracing logger for the function.""" + return self._logger + + def __call__(self, function, args, keywords): + """Call *function*, tracing its arguments and return value. + + :arg tuple args: the positional arguments for *function* + :arg dict keywords: the keyword arguments for *function* + :return: + the value returned by calling *function* with positional + arguments *args* and keyword arguments *keywords* + + .. warning:: + This method does **not** perform a level check, and delegates + *directly* to :meth:`logging.Logger.handle`. The caller is + expected to perform the level check prior to calling this + method. + + .. note:: + If the return value of *function* is a `generator iterator + `_, + then this method returns *value* wrapped in a + :class:`_GeneratorIteratorTracingProxy` object to provide the + ``yield`` and ``StopIteration`` tracing support. 
+ + """ + self._logger.handle(logging.LogRecord((self._logger.name), + TRACE, + (self._func_filename), + (self._func_lineno), + "CALL *%r **%r", + ( + args, keywords), + None, + func=(function.__name__))) + value = function(*args, **keywords) + self._logger.handle(logging.LogRecord((self._logger.name), + TRACE, + (self._func_filename), + (self._func_lineno), + "RETURN %r", + ( + value,), + None, + func=(function.__name__))) + if isgenerator(value): + return _GeneratorIteratorTracingProxy(function, value, self._logger) + return value + + +class _GeneratorIteratorTracingProxy(object): + __doc__ = 'Proxy a generator iterator to capture and trace *YIELD*, *SEND*,\n *THROW*, *CLOSE* and *STOP* events.\n\n .. note::\n Generator iterators cannot be "rewound." A generator iterator\n that has been exhausted will continue to raise ``StopIteration``\n on all subsequent calls to ``next()``, and Autologging will\n dutifully trace each of those events. This behavior is by design;\n if a program is failing due to an unexpected ``StopIteration``\n exception, then the (traced) program should be able to identify\n when/where the errant ``next()`` call was made.\n\n ' + __autologging_traced__ = True + + def __init__(self, gfunc, giter, logger): + """ + :arg gfunc: + the generator function that returned *giter* + :arg types.GeneratorType iterator: + the generator iterator returned by *gfunc* + :arg logging.Logger logger: the tracing logger + """ + self._gfunc_lineno = gfunc.__code__.co_firstlineno + self._giter = giter + self._logger = logger + + @property + def __wrapped__(self): + return self._giter + + @property + def __name__(self): + return self._giter.__name__ + + @property + def _lineno(self): + if not _is_ironpython: + return getattr(self._giter.gi_frame, "f_lineno", self._gfunc_lineno) + return self._gfunc_lineno + + def __iter__(self): + """Return a self-reference. + + This method (along with :meth:`__next__`) implements the + iterator protocol for the proxy object. 
+ + """ + return self + + def _trace(self, message, *message_args): + giter = self._giter + self._logger.handle(logging.LogRecord((self._logger.name), + TRACE, + (giter.gi_code.co_filename), + (self._lineno), + message, + message_args, + None, + func=(giter.__name__))) + + def __next__(self): + """Attempt to return the next value from the wrapped generator + iterator. + + If a value is obtained, log the event at :obj:`TRACE` level in a + "YIELD" record. If the wrapped generator iterator is exhausted, + log the ``StopIteration`` event (exception) at :obj:`TRACE` + level in a "STOP" record. + + This method (along with :meth:`__iter__`) implements the + iterator protocol for the proxy object. + + """ + giter = self._giter + try: + value = next(giter) + except StopIteration: + self._trace("STOP %r", giter) + raise + else: + self._trace("YIELD %r %r", giter, value) + return value + + next = __next__ + + def send(self, value): + """Send *value* to the wrapped generator iterator, logging + the event at :obj:`TRACE` level in a "SEND" record. + + """ + giter = self._giter + self._trace("SEND %r %r", giter, value) + return giter.send(value) + + def throw(self, exception): + """Cause the wrapped generator iterator to raise *exception*, + logging the event at :obj:`TRACE` level in a "THROW" record. + + :arg Exception exception: + the exception object that the wrapped generator iterator + should throw + + """ + giter = self._giter + self._trace("THROW %r %r", giter, exception) + giter.throw(exception) + + def close(self): + """Close the wrapped generator iterator, logging the event + at :obj:`TRACE` level in a "CLOSE" record. 
+ + """ + giter = self._giter + self._trace("CLOSE %r", giter) + giter.close() diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/__init__.py b/APPS_UNCOMPILED/lib/azure/iot/device/__init__.py new file mode 100644 index 0000000..da95bab --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/__init__.py @@ -0,0 +1,21 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/__init__.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 1962 bytes +""" Azure IoT Device Library + +This library provides clients and associated models for communicating with Azure IoT services +from an IoT device. +""" +from .iothub import * +from .provisioning import * +from .common import * +from . import iothub +from . import provisioning +from . import common +from . import patch_documentation +patch_documentation.execute_patch_for_sync() +__all__ = iothub.__all__ + provisioning.__all__ + common.__all__ diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/aio/__init__.py b/APPS_UNCOMPILED/lib/azure/iot/device/aio/__init__.py new file mode 100644 index 0000000..9250d69 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/aio/__init__.py @@ -0,0 +1,18 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/aio/__init__.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 1836 bytes +"""Azure IoT Device Library - Asynchronous + +This library provides asynchronous clients for communicating with Azure IoT services +from an IoT device. 
+""" +from azure.iot.device.iothub.aio import * +from azure.iot.device.provisioning.aio import * +import azure.iot.device.iothub.aio, azure.iot.device.provisioning.aio +from . import patch_documentation +patch_documentation.execute_patch_for_async() +__all__ = azure.iot.device.iothub.aio.__all__ + azure.iot.device.provisioning.aio.__all__ diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/aio/patch_documentation.py b/APPS_UNCOMPILED/lib/azure/iot/device/aio/patch_documentation.py new file mode 100644 index 0000000..0de306f --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/aio/patch_documentation.py @@ -0,0 +1,189 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/aio/patch_documentation.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 10588 bytes +"""This module provides hard coded patches used to modify items from the libraries. 
def execute_patch_for_async():
    """Apply hard-coded documentation patches to the async client classes.

    Each public method of the async IoTHub / Provisioning clients is replaced
    with an explicit wrapper that simply delegates to the parent-class
    implementation while carrying over the parent's docstring.  The wrappers
    are written out by hand (rather than generated with ``exec``) so that no
    dynamic code execution is needed.
    """

    def _patch(target_cls, name, fn, as_classmethod=False):
        # Copy the docstring from the attribute currently on the class
        # (i.e. the inherited implementation), then install the wrapper.
        # Order matters: getattr must run before setattr.
        fn.__doc__ = getattr(target_cls, name).__doc__
        setattr(target_cls, name, classmethod(fn) if as_classmethod else fn)

    from azure.iot.device.iothub.aio.async_clients import IoTHubDeviceClient as IoTHubDeviceClient_

    async def connect(self):
        return await super(IoTHubDeviceClient_, self).connect()

    _patch(IoTHubDeviceClient_, "connect", connect)

    async def disconnect(self):
        return await super(IoTHubDeviceClient_, self).disconnect()

    _patch(IoTHubDeviceClient_, "disconnect", disconnect)

    async def get_twin(self):
        return await super(IoTHubDeviceClient_, self).get_twin()

    _patch(IoTHubDeviceClient_, "get_twin", get_twin)

    async def patch_twin_reported_properties(self, reported_properties_patch):
        return await super(IoTHubDeviceClient_, self).patch_twin_reported_properties(reported_properties_patch)

    _patch(IoTHubDeviceClient_, "patch_twin_reported_properties", patch_twin_reported_properties)

    async def receive_method_request(self, method_name=None):
        return await super(IoTHubDeviceClient_, self).receive_method_request(method_name)

    _patch(IoTHubDeviceClient_, "receive_method_request", receive_method_request)

    async def receive_twin_desired_properties_patch(self):
        return await super(IoTHubDeviceClient_, self).receive_twin_desired_properties_patch()

    _patch(IoTHubDeviceClient_, "receive_twin_desired_properties_patch", receive_twin_desired_properties_patch)

    async def send_message(self, message):
        return await super(IoTHubDeviceClient_, self).send_message(message)

    _patch(IoTHubDeviceClient_, "send_message", send_message)

    async def send_method_response(self, method_response):
        return await super(IoTHubDeviceClient_, self).send_method_response(method_response)

    _patch(IoTHubDeviceClient_, "send_method_response", send_method_response)

    def update_sastoken(self, sastoken):
        return super(IoTHubDeviceClient_, self).update_sastoken(sastoken)

    _patch(IoTHubDeviceClient_, "update_sastoken", update_sastoken)

    def create_from_connection_string(cls, connection_string, **kwargs):
        return super(IoTHubDeviceClient_, cls).create_from_connection_string(connection_string, **kwargs)

    _patch(IoTHubDeviceClient_, "create_from_connection_string", create_from_connection_string, as_classmethod=True)

    def create_from_sastoken(cls, sastoken, **kwargs):
        return super(IoTHubDeviceClient_, cls).create_from_sastoken(sastoken, **kwargs)

    _patch(IoTHubDeviceClient_, "create_from_sastoken", create_from_sastoken, as_classmethod=True)

    def create_from_symmetric_key(cls, symmetric_key, hostname, device_id, **kwargs):
        return super(IoTHubDeviceClient_, cls).create_from_symmetric_key(symmetric_key, hostname, device_id, **kwargs)

    _patch(IoTHubDeviceClient_, "create_from_symmetric_key", create_from_symmetric_key, as_classmethod=True)

    def create_from_x509_certificate(cls, x509, hostname, device_id, **kwargs):
        return super(IoTHubDeviceClient_, cls).create_from_x509_certificate(x509, hostname, device_id, **kwargs)

    _patch(IoTHubDeviceClient_, "create_from_x509_certificate", create_from_x509_certificate, as_classmethod=True)

    from azure.iot.device.iothub.aio.async_clients import IoTHubModuleClient as IoTHubModuleClient_

    async def connect(self):
        return await super(IoTHubModuleClient_, self).connect()

    _patch(IoTHubModuleClient_, "connect", connect)

    async def disconnect(self):
        return await super(IoTHubModuleClient_, self).disconnect()

    _patch(IoTHubModuleClient_, "disconnect", disconnect)

    async def get_twin(self):
        return await super(IoTHubModuleClient_, self).get_twin()

    _patch(IoTHubModuleClient_, "get_twin", get_twin)

    async def patch_twin_reported_properties(self, reported_properties_patch):
        return await super(IoTHubModuleClient_, self).patch_twin_reported_properties(reported_properties_patch)

    _patch(IoTHubModuleClient_, "patch_twin_reported_properties", patch_twin_reported_properties)

    async def receive_method_request(self, method_name=None):
        return await super(IoTHubModuleClient_, self).receive_method_request(method_name)

    _patch(IoTHubModuleClient_, "receive_method_request", receive_method_request)

    async def receive_twin_desired_properties_patch(self):
        return await super(IoTHubModuleClient_, self).receive_twin_desired_properties_patch()

    _patch(IoTHubModuleClient_, "receive_twin_desired_properties_patch", receive_twin_desired_properties_patch)

    async def send_message(self, message):
        return await super(IoTHubModuleClient_, self).send_message(message)

    _patch(IoTHubModuleClient_, "send_message", send_message)

    async def send_method_response(self, method_response):
        return await super(IoTHubModuleClient_, self).send_method_response(method_response)

    _patch(IoTHubModuleClient_, "send_method_response", send_method_response)

    def update_sastoken(self, sastoken):
        return super(IoTHubModuleClient_, self).update_sastoken(sastoken)

    _patch(IoTHubModuleClient_, "update_sastoken", update_sastoken)

    def create_from_connection_string(cls, connection_string, **kwargs):
        return super(IoTHubModuleClient_, cls).create_from_connection_string(connection_string, **kwargs)

    _patch(IoTHubModuleClient_, "create_from_connection_string", create_from_connection_string, as_classmethod=True)

    def create_from_edge_environment(cls, **kwargs):
        return super(IoTHubModuleClient_, cls).create_from_edge_environment(**kwargs)

    _patch(IoTHubModuleClient_, "create_from_edge_environment", create_from_edge_environment, as_classmethod=True)

    def create_from_sastoken(cls, sastoken, **kwargs):
        return super(IoTHubModuleClient_, cls).create_from_sastoken(sastoken, **kwargs)

    _patch(IoTHubModuleClient_, "create_from_sastoken", create_from_sastoken, as_classmethod=True)

    def create_from_x509_certificate(cls, x509, hostname, device_id, module_id, **kwargs):
        return super(IoTHubModuleClient_, cls).create_from_x509_certificate(x509, hostname, device_id, module_id, **kwargs)

    _patch(IoTHubModuleClient_, "create_from_x509_certificate", create_from_x509_certificate, as_classmethod=True)

    from azure.iot.device.provisioning.aio.async_provisioning_device_client import ProvisioningDeviceClient as ProvisioningDeviceClient_

    def create_from_symmetric_key(cls, provisioning_host, registration_id, id_scope, symmetric_key, **kwargs):
        return super(ProvisioningDeviceClient_, cls).create_from_symmetric_key(provisioning_host, registration_id, id_scope, symmetric_key, **kwargs)

    _patch(ProvisioningDeviceClient_, "create_from_symmetric_key", create_from_symmetric_key, as_classmethod=True)

    def create_from_x509_certificate(cls, provisioning_host, registration_id, id_scope, x509, **kwargs):
        return super(ProvisioningDeviceClient_, cls).create_from_x509_certificate(provisioning_host, registration_id, id_scope, x509, **kwargs)

    _patch(ProvisioningDeviceClient_, "create_from_x509_certificate", create_from_x509_certificate, as_classmethod=True)
bytes +"""Azure IoT Device Common + +This package provides shared modules for use with various Azure IoT device-side clients. + +INTERNAL USAGE ONLY +""" +from .models import X509, ProxyOptions +__all__ = [ + "X509", "ProxyOptions"] diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/common/async_adapter.py b/APPS_UNCOMPILED/lib/azure/iot/device/common/async_adapter.py new file mode 100644 index 0000000..83828ae --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/common/async_adapter.py @@ -0,0 +1,77 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/common/async_adapter.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 3754 bytes +"""This module contains tools for adapting sync code for use in async coroutines.""" +import functools, logging, traceback +import azure.iot.device.common.asyncio_compat as asyncio_compat +logger = logging.getLogger(__name__) + +def emulate_async(fn): + """Returns a coroutine function that calls a given function with emulated asynchronous + behavior via use of mulithreading. + + Can be applied as a decorator. + + :param fn: The sync function to be run in async. + :returns: A coroutine function that will call the given sync function. 
+ """ + + @functools.wraps(fn) + async def async_fn_wrapper(*args, **kwargs): + loop = asyncio_compat.get_running_loop() + return await loop.run_in_executor(None, (functools.partial)(fn, *args, **kwargs)) + + return async_fn_wrapper + + +class AwaitableCallback(object): + __doc__ = "A sync callback whose completion can be waited upon.\n " + + def __init__(self, return_arg_name=None): + """Creates an instance of an AwaitableCallback + """ + if return_arg_name: + if not isinstance(return_arg_name, str): + raise TypeError("internal error: return_arg_name must be a string") + loop = asyncio_compat.get_running_loop() + self.future = asyncio_compat.create_future(loop) + + def wrapping_callback(*args, **kwargs): + if "error" in kwargs and kwargs["error"]: + exception = kwargs["error"] + else: + if return_arg_name: + if return_arg_name in kwargs: + exception = None + result = kwargs[return_arg_name] + else: + raise TypeError("internal error: excepected argument with name '{}', did not get".format(return_arg_name)) + else: + exception = None + result = None + if exception: + logger.info("Callback completed with error {}".format(exception)) + logger.info(traceback.format_exception_only(type(exception), exception)) + loop.call_soon_threadsafe(self.future.set_exception, exception) + else: + logger.debug("Callback completed with result {}".format(result)) + loop.call_soon_threadsafe(self.future.set_result, result) + + self.callback = wrapping_callback + + def __call__(self, *args, **kwargs): + """Calls the callback. Returns the result. + """ + return (self.callback)(*args, **kwargs) + + async def completion(self): + """Awaitable coroutine method that will return once the AwaitableCallback + has been completed. + + :returns: Result of the callback when it was called. 
+ """ + return await self.future diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/common/asyncio_compat.py b/APPS_UNCOMPILED/lib/azure/iot/device/common/asyncio_compat.py new file mode 100644 index 0000000..d6e428e --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/common/asyncio_compat.py @@ -0,0 +1,79 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/common/asyncio_compat.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 2455 bytes +"""This module contains compatibility tools for bridging different versions of asyncio""" +import asyncio, sys + +def get_running_loop(): + """Gets the currently running event loop + + Uses asyncio.get_running_loop() if available (Python 3.7+) or a backported + version of the same function in 3.5/3.6. + """ + try: + loop = asyncio.get_running_loop() + except AttributeError: + loop = asyncio._get_running_loop() + if loop is None: + raise RuntimeError("no running event loop") + + return loop + + +def create_task(coro): + """Creates a Task object. + + If avaialable (Python 3.7+), use asyncio.create_task, which is preferred as it is + more specific for the goal of immediately scheduling a task from a coroutine. If + not available, use the more general puprose asyncio.ensure_future. + + :returns: A new Task object. + """ + try: + task = asyncio.create_task(coro) + except AttributeError: + task = asyncio.ensure_future(coro) + + return task + + +def create_future(loop): + """Creates a Future object. + + Uses loop.create_future if it is available. Otherwise, create the object directly. + + Use of loop.create_future is preferred because it allows third parties to provide their own + Future object, but it is only available in 3.5.2+ + + :returns: A new Future object. 
+ """ + try: + future = loop.create_future() + except AttributeError: + future = asyncio.Future(loop=loop) + + return future + + +def run(coro): + """Execute the coroutine coro and return the result. + + It creates a new event loop and closes it at the end. + Cannot be called when another asyncio event loop is running in the same thread. + + If available (Python 3.7+) use asyncio.run. If not available, use a custom implementation + that achieves the same thing + """ + if sys.version_info >= (3, 7): + return asyncio.run(coro) + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + try: + return loop.run_until_complete(coro) + finally: + loop.close() + asyncio.set_event_loop(None) diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/common/auth/__init__.py b/APPS_UNCOMPILED/lib/azure/iot/device/common/auth/__init__.py new file mode 100644 index 0000000..28c865c --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/common/auth/__init__.py @@ -0,0 +1,8 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/common/auth/__init__.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 303 bytes +from .signing_mechanism import SymmetricKeySigningMechanism diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/common/auth/connection_string.py b/APPS_UNCOMPILED/lib/azure/iot/device/common/auth/connection_string.py new file mode 100644 index 0000000..e49e554 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/common/auth/connection_string.py @@ -0,0 +1,93 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/common/auth/connection_string.py +# Compiled 
"""This module contains tools for working with Connection Strings"""
__all__ = ["ConnectionString"]

CS_DELIMITER = ";"
CS_VAL_SEPARATOR = "="
HOST_NAME = "HostName"
SHARED_ACCESS_KEY_NAME = "SharedAccessKeyName"
SHARED_ACCESS_KEY = "SharedAccessKey"
SHARED_ACCESS_SIGNATURE = "SharedAccessSignature"
DEVICE_ID = "DeviceId"
MODULE_ID = "ModuleId"
GATEWAY_HOST_NAME = "GatewayHostName"

# Every key that may legally appear in a connection string.
_valid_keys = [
    HOST_NAME,
    SHARED_ACCESS_KEY_NAME,
    SHARED_ACCESS_KEY,
    SHARED_ACCESS_SIGNATURE,
    DEVICE_ID,
    MODULE_ID,
    GATEWAY_HOST_NAME,
]


def _parse_connection_string(connection_string):
    """Break *connection_string* into a dict of its key/value pairs.

    :raises TypeError: if the input is not a string
    :raises ValueError: if the string is malformed or contains invalid keys
    """
    try:
        segments = connection_string.split(CS_DELIMITER)
    except (AttributeError, TypeError):
        raise TypeError("Connection String must be of type str")

    try:
        parsed = dict(segment.split(CS_VAL_SEPARATOR, 1) for segment in segments)
    except ValueError:
        raise ValueError("Invalid Connection String - Unable to parse")

    # A duplicate key collapses into one dict entry, so the counts diverge.
    if len(parsed) != len(segments):
        raise ValueError("Invalid Connection String - Unable to parse")
    for key in parsed:
        if key not in _valid_keys:
            raise ValueError("Invalid Connection String - Invalid Key")
    _validate_keys(parsed)
    return parsed


def _validate_keys(d):
    """Raise ValueError unless *d* holds a complete combination of keys."""
    host_name = d.get(HOST_NAME)
    shared_access_key = d.get(SHARED_ACCESS_KEY)
    shared_access_key_name = d.get(SHARED_ACCESS_KEY_NAME)
    device_id = d.get(DEVICE_ID)

    device_auth = host_name and device_id and shared_access_key
    service_auth = host_name and shared_access_key and shared_access_key_name
    if not (device_auth or service_auth):
        raise ValueError("Invalid Connection String - Incomplete")


class ConnectionString(object):
    """Key/value mappings for connection details.
    Uses the same syntax as dictionary
    """

    def __init__(self, connection_string):
        """Initializer for ConnectionString

        :param str connection_string: String with connection details provided by Azure
        :raises: ValueError if provided connection_string is invalid
        """
        self._dict = _parse_connection_string(connection_string)
        self._strrep = connection_string

    def __getitem__(self, key):
        return self._dict[key]

    def __repr__(self):
        return self._strrep

    def get(self, key, default=None):
        """Return the value for key if key is in the dictionary, else default

        :param str key: The key to retrieve a value for
        :param str default: The default value returned if a key is not found
        :returns: The value for the given key
        """
        if key in self._dict:
            return self._dict[key]
        return default
"""This module contains tools for working with Shared Access Signature (SAS) Tokens"""
import time
import urllib.parse

from azure.iot.device.common.chainable_exception import ChainableException


class SasTokenError(ChainableException):
    """Error in SasToken"""


class RenewableSasToken(object):
    """Renewable Shared Access Signature Token used to authenticate a request.

    This token is 'renewable', which means that it can be updated when necessary to
    prevent expiry, by using the .refresh() method.

    Data Attributes:
        expiry_time (int): Time that token will expire (in UTC, since epoch)
        ttl (int): Time to live for the token, in seconds
    """

    _auth_rule_token_format = "SharedAccessSignature sr={resource}&sig={signature}&se={expiry}&skn={keyname}"
    _simple_token_format = "SharedAccessSignature sr={resource}&sig={signature}&se={expiry}"

    def __init__(self, uri, signing_mechanism, key_name=None, ttl=3600):
        """
        :param str uri: URI of the resource to be accessed
        :param signing_mechanism: The signing mechanism to use in the SasToken
        :type signing_mechanism: Child classes of :class:`azure.iot.common.SigningMechanism`
        :param str key_name: Symmetric Key Name (optional)
        :param int ttl: Time to live for the token, in seconds (default 3600)

        :raises: SasTokenError if an error occurs building a SasToken
        """
        self._uri = uri
        self._signing_mechanism = signing_mechanism
        self._key_name = key_name
        self._expiry_time = None  # set by refresh()
        self._token = None  # set by refresh()
        self.ttl = ttl
        self.refresh()

    def __str__(self):
        return self._token

    def refresh(self):
        """Refresh the SasToken lifespan, giving it a new expiry time, and
        generating a new token.
        """
        self._expiry_time = int(time.time() + self.ttl)
        self._token = self._build_token()

    def _build_token(self):
        """Build the SasToken string representation.

        :returns: String representation of the token
        :raises: SasTokenError if the signing mechanism fails
        """
        url_encoded_uri = urllib.parse.quote(self._uri, safe="")
        message = url_encoded_uri + "\n" + str(self.expiry_time)
        try:
            signature = self._signing_mechanism.sign(message)
        except Exception as e:
            # Chain the underlying failure so callers can see the root cause.
            raise SasTokenError("Unable to build SasToken from given values", e)
        url_encoded_signature = urllib.parse.quote(signature, safe="")
        if self._key_name:
            return self._auth_rule_token_format.format(
                resource=url_encoded_uri,
                signature=url_encoded_signature,
                expiry=str(self.expiry_time),
                keyname=self._key_name,
            )
        return self._simple_token_format.format(
            resource=url_encoded_uri,
            signature=url_encoded_signature,
            expiry=str(self.expiry_time),
        )

    @property
    def expiry_time(self):
        """Expiry Time is READ ONLY"""
        return self._expiry_time


class NonRenewableSasToken(object):
    """NonRenewable Shared Access Signature Token used to authenticate a request.

    This token is 'non-renewable', which means that it is invalid once it expires,
    and there is no way to keep it alive.  Instead, a new token must be created.

    Data Attributes:
        expiry_time (int): Time that token will expire (in UTC, since epoch)
        resource_uri (str): URI for the resource the Token provides authentication to access
    """

    def __init__(self, sastoken_string):
        """
        :param str sastoken_string: A string representation of a SAS token
        :raises: SasTokenError if the string is not a valid SAS token
        """
        self._token = sastoken_string
        self._token_info = get_sastoken_info_from_string(self._token)

    def __str__(self):
        return self._token

    @property
    def expiry_time(self):
        """Expiry Time is READ ONLY"""
        return int(self._token_info["se"])

    @property
    def resource_uri(self):
        """Resource URI is READ ONLY"""
        uri = self._token_info["sr"]
        return urllib.parse.unquote(uri)


REQUIRED_SASTOKEN_FIELDS = ["sr", "sig", "se"]
VALID_SASTOKEN_FIELDS = REQUIRED_SASTOKEN_FIELDS + ["skn"]


def get_sastoken_info_from_string(sastoken_string):
    """Parse a SAS token string into a dict of its fields.

    :param str sastoken_string: the raw token string
    :returns: dict mapping field names (sr/sig/se/skn) to values
    :raises: SasTokenError if the string is not a well-formed SAS token
    """
    pieces = sastoken_string.split("SharedAccessSignature ")
    if len(pieces) != 2:
        raise SasTokenError("Invalid SasToken string: Not a SasToken ")
    try:
        sastoken_info = dict(map(str.strip, sub.split("=", 1)) for sub in pieces[1].split("&"))
    except Exception as e:
        raise SasTokenError("Invalid SasToken string: Incorrectly formatted", e)
    if not all(key in sastoken_info for key in REQUIRED_SASTOKEN_FIELDS):
        raise SasTokenError("Invalid SasToken string: Not all required fields present")
    if not all(key in VALID_SASTOKEN_FIELDS for key in sastoken_info):
        raise SasTokenError("Invalid SasToken string: Unexpected fields present")
    return sastoken_info
class SymmetricKeySigningMechanism(SigningMechanism):
    """Signing mechanism that HMAC-SHA256 signs payloads with a symmetric key."""

    def __init__(self, key):
        """
        A mechanism that signs data using a symmetric key

        :param key: Symmetric Key (base64 encoded)
        :type key: str or bytes

        :raises ValueError: if the key cannot be base64-decoded
        """
        # Normalize a text key to bytes; a bytes key passes through untouched.
        if isinstance(key, str):
            key = key.encode("utf-8")
        try:
            self._signing_key = base64.b64decode(key)
        except (base64.binascii.Error, TypeError):
            raise ValueError("Invalid Symmetric Key")

    def sign(self, data_str):
        """
        Sign a data string with symmetric key and the HMAC-SHA256 algorithm.

        :param data_str: Data string to be signed
        :type data_str: str or bytes

        :returns: The signed data
        :rtype: str

        :raises ValueError: if the data cannot be signed with the stored key
        """
        # Normalize text input to bytes, as required by hmac.
        if isinstance(data_str, str):
            data_str = data_str.encode("utf-8")
        try:
            raw_digest = hmac.new(self._signing_key, msg=data_str, digestmod=hashlib.sha256).digest()
            encoded_digest = base64.b64encode(raw_digest)
        except TypeError:
            raise ValueError("Unable to sign string using the provided symmetric key")
        return encoded_digest.decode("utf-8")
class CallableWeakMethod(object):
    """Callable wrapper that holds only a *weak* reference to a method's target.

    Similar to weakref.WeakMethod, but works on Python 2.7 and the wrapper
    itself is callable. It exists to break reference cycles between an object
    ``a`` and an object ``b`` that holds a callback into ``a``: ``a`` keeps a
    strong reference to ``b``, while ``b``'s callback keeps only a weak
    reference back to ``a``. The Python 2.7 garbage collector cannot collect
    such a cycle when either object has a finalizer; breaking the cycle this
    way lets both objects be collected. Once every supported interpreter
    implements PEP 442 (https://www.python.org/dev/peps/pep-0442/), this
    wrapper becomes unnecessary.
    """

    def __init__(self, object, method_name):
        # Keep only a weak reference to the target; the method is re-resolved
        # by name on every use so a dead target is detected naturally.
        self.object_weakref = weakref.ref(object)
        self.method_name = method_name

    def _get_method(self):
        """Resolve the bound method, or None if the target has been collected."""
        target = self.object_weakref()
        return getattr(target, self.method_name, None)

    def __call__(self, *args, **kwargs):
        method = self._get_method()
        return method(*args, **kwargs)

    def __eq__(self, other):
        # Compare via the freshly resolved bound method (None once dead).
        return self._get_method() == other

    def __repr__(self):
        target = self.object_weakref()
        if target:
            return "CallableWeakMethod for {}".format(self._get_method())
        return "CallableWeakMethod for {} (DEAD)".format(self.method_name)
class ChainableException(Exception):
    """Exception that keeps a reference to the exception which caused it."""

    def __init__(self, message=None, cause=None):
        # Store the causing exception in the standard __cause__ slot so it
        # lines up with `raise ... from ...` semantics and tracebacks.
        self.__cause__ = cause
        super(ChainableException, self).__init__(message)

    def __str__(self):
        base = super(ChainableException, self).__repr__()
        if not self.__cause__:
            return base
        return "{} caused by {}".format(base, repr(self.__cause__))
class EventedCallback(object):
    """A sync callback whose completion can be waited upon via a threading.Event."""

    def __init__(self, return_arg_name=None):
        """
        Creates an instance of an EventedCallback.

        :param str return_arg_name: Optional name of the keyword argument (in the
            eventual invocation of this callback) whose value becomes the result
            returned by wait_for_completion().
        :raises TypeError: if return_arg_name is given but is not a string.
        """
        if return_arg_name:
            if not isinstance(return_arg_name, six.string_types):
                raise TypeError("internal error: return_arg_name must be a string")
        # Set when the wrapped callback fires; wait_for_completion blocks on it.
        self.completion_event = threading.Event()
        self.exception = None
        self.result = None

        def wrapping_callback(*args, **kwargs):
            # A truthy "error" kwarg wins over any result.
            if "error" in kwargs and kwargs["error"]:
                self.exception = kwargs["error"]
            else:
                if return_arg_name:
                    if return_arg_name in kwargs:
                        self.result = kwargs[return_arg_name]
                    else:
                        # NOTE: the "excepected" typo is part of the original
                        # runtime message and is preserved deliberately.
                        raise TypeError("internal error: excepected argument with name '{}', did not get".format(return_arg_name))
                # NOTE(review): this control flow is decompiler output. As written,
                # the log statements below are skipped both when an error was stored
                # and when return_arg_name matched a kwarg; upstream may have logged
                # completion on every path — confirm against the original SDK source
                # before restructuring.
                elif self.exception:
                    logger.info("Callback completed with error {}".format(self.exception))
                    logger.info(traceback.format_exc())
                else:
                    logger.debug("Callback completed with result {}".format(self.result))
            # Wake any waiter regardless of which branch ran.
            self.completion_event.set()

        self.callback = wrapping_callback

    def __call__(self, *args, **kwargs):
        """
        Calls the callback.
        """
        (self.callback)(*args, **kwargs)

    def wait_for_completion(self, *args, **kwargs):
        """
        Wait for the callback to be called, and return the results.

        Any positional/keyword arguments are forwarded to threading.Event.wait
        (e.g. a timeout).

        :returns: the value captured from the callback's return_arg_name kwarg.
        :raises: the exception captured from the callback's "error" kwarg, if any.
        """
        (self.completion_event.wait)(*args, **kwargs)
        if self.exception:
            raise self.exception
        else:
            return self.result
+ """ + (self.completion_event.wait)(*args, **kwargs) + if self.exception: + raise self.exception + else: + return self.result diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/common/handle_exceptions.py b/APPS_UNCOMPILED/lib/azure/iot/device/common/handle_exceptions.py new file mode 100644 index 0000000..e1a7a4b --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/common/handle_exceptions.py @@ -0,0 +1,54 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/common/handle_exceptions.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 2233 bytes +import logging, traceback +logger = logging.getLogger(__name__) + +def handle_background_exception(e): + """ + Function which handled exceptions that are caught in background thread. This is + typically called from the callback thread inside the pipeline. These exceptions + need special handling because callback functions are typically called inside a + non-application thread in response to non-user-initiated actions, so there's + nobody else to catch them. + + This function gets called from inside an arbitrary thread context, so code that + runs from this function should be limited to the bare minumum. + + :param Error e: Exception object raised from inside a background thread + """ + logger.error(msg="Exception caught in background thread. Unable to handle.") + logger.error(traceback.format_exception_only(type(e), e)) + + +def swallow_unraised_exception(e, log_msg=None, log_lvl='warning'): + """Swallow and log an exception object. + + Convenience function for logging, as exceptions can only be logged correctly from within a + except block. + + :param Exception e: Exception object to be swallowed. + :param str log_msg: Optional message to use when logging. 
+ :param str log_lvl: The log level to use for logging. Default "warning". + """ + try: + raise e + except Exception: + if log_lvl == "warning": + logger.warning(log_msg) + logger.warning(traceback.format_exc()) + else: + if log_lvl == "error": + logger.error(log_msg) + logger.error(traceback.format_exc()) + else: + if log_lvl == "info": + logger.info(log_msg) + logger.info(traceback.format_exc()) + else: + logger.debug(log_msg) + logger.debug(traceback.format_exc()) diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/common/http_transport.py b/APPS_UNCOMPILED/lib/azure/iot/device/common/http_transport.py new file mode 100644 index 0000000..2c6087f --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/common/http_transport.py @@ -0,0 +1,104 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/common/http_transport.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 6031 bytes +import logging, uuid, threading, json, ssl +from . import transport_exceptions as exceptions +from .pipeline import pipeline_thread +from six.moves import http_client +logger = logging.getLogger(__name__) + +class HTTPTransport(object): + __doc__ = "\n A wrapper class that provides an implementation-agnostic HTTP interface.\n " + + def __init__(self, hostname, server_verification_cert=None, x509_cert=None, cipher=None): + """ + Constructor to instantiate an HTTP protocol wrapper. + + :param str hostname: Hostname or IP address of the remote host. + :param str server_verification_cert: Certificate which can be used to validate a server-side TLS connection (optional). + :param str cipher: Cipher string in OpenSSL cipher list format (optional) + :param x509_cert: Certificate which can be used to authenticate connection to a server in lieu of a password (optional). 
+ """ + self._hostname = hostname + self._server_verification_cert = server_verification_cert + self._x509_cert = x509_cert + self._cipher = cipher + self._ssl_context = self._create_ssl_context() + + def _create_ssl_context(self): + """ + This method creates the SSLContext object used to authenticate the connection. The generated context is used by the http_client and is necessary when authenticating using a self-signed X509 cert or trusted X509 cert + """ + logger.debug("creating a SSL context") + ssl_context = ssl.SSLContext(protocol=(ssl.PROTOCOL_TLSv1_2)) + if self._server_verification_cert: + ssl_context.load_verify_locations(cadata=(self._server_verification_cert)) + else: + ssl_context.load_default_certs() + if self._cipher: + try: + ssl_context.set_ciphers(self._cipher) + except ssl.SSLError as e: + try: + raise e + finally: + e = None + del e + + if self._x509_cert is not None: + logger.debug("configuring SSL context with client-side certificate and key") + ssl_context.load_cert_chain(self._x509_cert.certificate_file, self._x509_cert.key_file, self._x509_cert.pass_phrase) + ssl_context.verify_mode = ssl.CERT_REQUIRED + ssl_context.check_hostname = True + return ssl_context + + @pipeline_thread.invoke_on_http_thread_nowait + def request(self, method, path, callback, body="", headers={}, query_params=""): + """ + This method creates a connection to a remote host, sends a request to that host, and then waits for and reads the response from that request. + + :param str method: The request method (e.g. "POST") + :param str path: The path for the URL + :param Function callback: The function that gets called when this operation is complete or has failed. The callback function must accept an error and a response dictionary, where the response dictionary contains a status code, a reason, and a response string. + :param str body: The body of the HTTP request to be sent following the headers. 
+ :param dict headers: A dictionary that provides extra HTTP headers to be sent with the request. + :param str query_params: The optional query parameters to be appended at the end of the URL. + """ + logger.info("sending https {} request to {} .".format(method, path)) + try: + logger.debug("creating an https connection") + connection = http_client.HTTPSConnection((self._hostname), context=(self._ssl_context)) + logger.debug("connecting to host tcp socket") + connection.connect() + logger.debug("connection succeeded") + url = "https://{hostname}/{path}{query_params}".format(hostname=(self._hostname), + path=path, + query_params=("?" + query_params if query_params else "")) + logger.debug("Sending Request to HTTP URL: {}".format(url)) + logger.debug("HTTP Headers: {}".format(headers)) + logger.debug("HTTP Body: {}".format(body)) + connection.request(method, url, body=body, headers=headers) + response = connection.getresponse() + status_code = response.status + reason = response.reason + response_string = response.read() + logger.debug("response received") + logger.debug("closing connection to https host") + connection.close() + logger.debug("connection closed") + logger.info("https {} request sent to {}, and {} response received.".format(method, path, status_code)) + response_obj = {'status_code':status_code, + 'reason':reason, 'resp':response_string} + callback(response=response_obj) + except Exception as e: + try: + logger.info("Error in HTTP Transport: {}".format(e)) + callback(error=exceptions.ProtocolClientError(message="Unexpected HTTPS failure during connect", + cause=e)) + finally: + e = None + del e diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/common/models/__init__.py b/APPS_UNCOMPILED/lib/azure/iot/device/common/models/__init__.py new file mode 100644 index 0000000..88bc770 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/common/models/__init__.py @@ -0,0 +1,13 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# 
class ProxyOptions(object):
    """
    Holds the settings needed to tunnel the MQTT connection through a proxy
    server (HTTP CONNECT, SOCKS4, or SOCKS5).
    """

    def __init__(self, proxy_type, proxy_addr, proxy_port, proxy_username=None, proxy_password=None):
        """
        Initializer for proxy options.

        :param proxy_type: The type of the proxy server. One of socks.HTTP,
            socks.SOCKS4, or socks.SOCKS5.
        :param proxy_addr: IP address or DNS name of the proxy server.
        :param proxy_port: Port of the proxy server (conventionally 1080 for
            SOCKS and 8080 for HTTP).
        :param proxy_username: (optional) Username for SOCKS5, or userid for
            SOCKS4. Ignored for HTTP proxies; if omitted, no authentication is
            attempted (servers may accept unauthenticated requests).
        :param proxy_password: (optional) Password for the SOCKS5 username.
        """
        # Settings are exposed read-only through the properties below.
        self._proxy_type = proxy_type
        self._proxy_addr = proxy_addr
        self._proxy_port = proxy_port
        self._proxy_username = proxy_username
        self._proxy_password = proxy_password

    @property
    def proxy_type(self):
        """The proxy protocol in use (read-only)."""
        return self._proxy_type

    @property
    def proxy_address(self):
        """The proxy server's address (read-only)."""
        return self._proxy_addr

    @property
    def proxy_port(self):
        """The proxy server's port (read-only)."""
        return self._proxy_port

    @property
    def proxy_username(self):
        """The username/userid for proxy authentication, if any (read-only)."""
        return self._proxy_username

    @property
    def proxy_password(self):
        """The password for proxy authentication, if any (read-only)."""
        return self._proxy_password
+""" + +class X509(object): + __doc__ = "\n A class with references to the certificate, key, and optional pass-phrase used to authenticate\n a TLS connection using x509 certificates\n " + + def __init__(self, cert_file, key_file, pass_phrase=None): + """ + Initializer for X509 Certificate + :param cert_file: The file path to contents of the certificate (or certificate chain) + used to authenticate the device. + :param key_file: The file path to the key associated with the certificate + :param pass_phrase: (optional) The pass_phrase used to encode the key file + """ + self._cert_file = cert_file + self._key_file = key_file + self._pass_phrase = pass_phrase + + @property + def certificate_file(self): + return self._cert_file + + @property + def key_file(self): + return self._key_file + + @property + def pass_phrase(self): + return self._pass_phrase diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/common/mqtt_transport.py b/APPS_UNCOMPILED/lib/azure/iot/device/common/mqtt_transport.py new file mode 100644 index 0000000..598c2ef --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/common/mqtt_transport.py @@ -0,0 +1,513 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/common/mqtt_transport.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 28817 bytes +import paho.mqtt.client as mqtt +import logging, ssl, sys, threading, traceback, weakref, socket +from . 
from . import transport_exceptions as exceptions
import socks

logger = logging.getLogger(__name__)

# Maps paho CONNACK result codes to the exception class raised for that failure.
paho_connack_rc_to_error = {
    mqtt.CONNACK_REFUSED_PROTOCOL_VERSION: exceptions.ProtocolClientError,
    mqtt.CONNACK_REFUSED_IDENTIFIER_REJECTED: exceptions.ProtocolClientError,
    mqtt.CONNACK_REFUSED_SERVER_UNAVAILABLE: exceptions.ConnectionFailedError,
    mqtt.CONNACK_REFUSED_BAD_USERNAME_PASSWORD: exceptions.UnauthorizedError,
    mqtt.CONNACK_REFUSED_NOT_AUTHORIZED: exceptions.UnauthorizedError,
}

# Maps generic paho MQTT_ERR_* codes to the exception class raised for that failure.
paho_rc_to_error = {
    mqtt.MQTT_ERR_NOMEM: exceptions.ProtocolClientError,
    mqtt.MQTT_ERR_PROTOCOL: exceptions.ProtocolClientError,
    mqtt.MQTT_ERR_INVAL: exceptions.ProtocolClientError,
    mqtt.MQTT_ERR_NO_CONN: exceptions.ConnectionDroppedError,
    mqtt.MQTT_ERR_CONN_REFUSED: exceptions.ConnectionFailedError,
    mqtt.MQTT_ERR_NOT_FOUND: exceptions.ConnectionFailedError,
    mqtt.MQTT_ERR_CONN_LOST: exceptions.ConnectionDroppedError,
    mqtt.MQTT_ERR_TLS: exceptions.UnauthorizedError,
    mqtt.MQTT_ERR_PAYLOAD_SIZE: exceptions.ProtocolClientError,
    mqtt.MQTT_ERR_NOT_SUPPORTED: exceptions.ProtocolClientError,
    mqtt.MQTT_ERR_AUTH: exceptions.UnauthorizedError,
    mqtt.MQTT_ERR_ACL_DENIED: exceptions.UnauthorizedError,
    mqtt.MQTT_ERR_UNKNOWN: exceptions.ProtocolClientError,
    mqtt.MQTT_ERR_ERRNO: exceptions.ProtocolClientError,
    mqtt.MQTT_ERR_QUEUE_SIZE: exceptions.ProtocolClientError,
}


def _create_error_from_connack_rc_code(rc):
    """
    Given a paho CONNACK rc code, return an Exception that can be raised.

    :param int rc: CONNACK return code reported by paho.
    :returns: An exception instance appropriate for the failure (ProtocolClientError
        for unrecognized codes).
    """
    message = mqtt.connack_string(rc)
    if rc in paho_connack_rc_to_error:
        return paho_connack_rc_to_error[rc](message)
    return exceptions.ProtocolClientError("Unknown CONNACK rc={}".format(rc))


def _create_error_from_rc_code(rc):
    """
    Given a generic paho rc code, return an Exception that can be raised.

    :param int rc: paho MQTT_ERR_* return code.
    :returns: An exception instance appropriate for the failure (ProtocolClientError
        for unrecognized codes).
    """
    if rc == 1:
        # Paho reports rc==1 for connection drops without a more specific code.
        return exceptions.ConnectionDroppedError("Paho returned rc==1")
    if rc in paho_rc_to_error:
        message = mqtt.error_string(rc)
        return paho_rc_to_error[rc](message)
    # BUGFIX: this helper handles generic paho rc codes, not CONNACK codes.
    # The previous fallback message incorrectly claimed "Unknown CONNACK rc==...".
    return exceptions.ProtocolClientError("Unknown rc=={}".format(rc))
paho_rc_to_error: + message = mqtt.error_string(rc) + return paho_rc_to_error[rc](message) + return exceptions.ProtocolClientError("Unknown CONNACK rc=={}".format(rc)) + + +class MQTTTransport(object): + __doc__ = "\n A wrapper class that provides an implementation-agnostic MQTT message broker interface.\n\n :ivar on_mqtt_connected_handler: Event handler callback, called upon establishing a connection.\n :type on_mqtt_connected_handler: Function\n :ivar on_mqtt_disconnected_handler: Event handler callback, called upon a disconnection.\n :type on_mqtt_disconnected_handler: Function\n :ivar on_mqtt_message_received_handler: Event handler callback, called upon receiving a message.\n :type on_mqtt_message_received_handler: Function\n :ivar on_mqtt_connection_failure_handler: Event handler callback, called upon a connection failure.\n :type on_mqtt_connection_failure_handler: Function\n " + + def __init__(self, client_id, hostname, username, server_verification_cert=None, x509_cert=None, websockets=False, cipher=None, proxy_options=None, keep_alive=None): + """ + Constructor to instantiate an MQTT protocol wrapper. + :param str client_id: The id of the client connecting to the broker. + :param str hostname: Hostname or IP address of the remote broker. + :param str username: Username for login to the remote broker. + :param str server_verification_cert: Certificate which can be used to validate a server-side TLS connection (optional). + :param x509_cert: Certificate which can be used to authenticate connection to a server in lieu of a password (optional). + :param bool websockets: Indicates whether or not to enable a websockets connection in the Transport. + :param str cipher: Cipher string in OpenSSL cipher list format + :param proxy_options: Options for sending traffic through proxy servers. 
+ """ + self._client_id = client_id + self._hostname = hostname + self._username = username + self._mqtt_client = None + self._server_verification_cert = server_verification_cert + self._x509_cert = x509_cert + self._websockets = websockets + self._cipher = cipher + self._proxy_options = proxy_options + self._keep_alive = keep_alive + self.on_mqtt_connected_handler = None + self.on_mqtt_disconnected_handler = None + self.on_mqtt_message_received_handler = None + self.on_mqtt_connection_failure_handler = None + self._op_manager = OperationManager() + self._mqtt_client = self._create_mqtt_client() + + def _create_mqtt_client(self): + """ + Create the MQTT client object and assign all necessary event handler callbacks. + """ + logger.debug("creating mqtt client") + if self._websockets: + logger.info("Creating client for connecting using MQTT over websockets") + mqtt_client = mqtt.Client(client_id=(self._client_id), + clean_session=False, + protocol=(mqtt.MQTTv311), + transport="websockets") + mqtt_client.ws_set_options(path="/$iothub/websocket") + else: + logger.info("Creating client for connecting using MQTT over TCP") + mqtt_client = mqtt.Client(client_id=(self._client_id), + clean_session=False, + protocol=(mqtt.MQTTv311)) + if self._proxy_options: + logger.info("Setting custom proxy options on mqtt client") + mqtt_client.proxy_set(proxy_type=(self._proxy_options.proxy_type), + proxy_addr=(self._proxy_options.proxy_address), + proxy_port=(self._proxy_options.proxy_port), + proxy_username=(self._proxy_options.proxy_username), + proxy_password=(self._proxy_options.proxy_password)) + mqtt_client.enable_logger(logging.getLogger("paho")) + ssl_context = self._create_ssl_context() + mqtt_client.tls_set_context(context=ssl_context) + self_weakref = weakref.ref(self) + + def on_connect(client, userdata, flags, rc): + this = self_weakref() + logger.info("connected with result code: {}".format(rc)) + if rc: + if this.on_mqtt_connection_failure_handler: + try: + 
this.on_mqtt_connection_failure_handler(_create_error_from_connack_rc_code(rc)) + except Exception: + logger.error("Unexpected error calling on_mqtt_connection_failure_handler") + logger.error(traceback.format_exc()) + + else: + logger.error("connection failed, but no on_mqtt_connection_failure_handler handler callback provided") + else: + if this.on_mqtt_connected_handler: + try: + this.on_mqtt_connected_handler() + except Exception: + logger.error("Unexpected error calling on_mqtt_connected_handler") + logger.error(traceback.format_exc()) + + else: + logger.error("No event handler callback set for on_mqtt_connected_handler") + + def on_disconnect(client, userdata, rc): + this = self_weakref() + logger.info("disconnected with result code: {}".format(rc)) + cause = None + if rc: + logger.debug("".join(traceback.format_stack())) + cause = _create_error_from_rc_code(rc) + if this: + this._cleanup_transport_on_error() + elif not this: + logger.info("on_disconnect called with transport==None. Transport must have been garbage collected. 
stopping loop") + client.loop_stop() + else: + if this.on_mqtt_disconnected_handler: + try: + this.on_mqtt_disconnected_handler(cause) + except Exception: + logger.error("Unexpected error calling on_mqtt_disconnected_handler") + logger.error(traceback.format_exc()) + + else: + logger.error("No event handler callback set for on_mqtt_disconnected_handler") + + def on_subscribe(client, userdata, mid, granted_qos): + this = self_weakref() + logger.info("suback received for {}".format(mid)) + this._op_manager.complete_operation(mid) + + def on_unsubscribe(client, userdata, mid): + this = self_weakref() + logger.info("UNSUBACK received for {}".format(mid)) + this._op_manager.complete_operation(mid) + + def on_publish(client, userdata, mid): + this = self_weakref() + logger.info("payload published for {}".format(mid)) + this._op_manager.complete_operation(mid) + + def on_message(client, userdata, mqtt_message): + this = self_weakref() + logger.info("message received on {}".format(mqtt_message.topic)) + if this.on_mqtt_message_received_handler: + try: + this.on_mqtt_message_received_handler(mqtt_message.topic, mqtt_message.payload) + except Exception: + logger.error("Unexpected error calling on_mqtt_message_received_handler") + logger.error(traceback.format_exc()) + + else: + logger.error("No event handler callback set for on_mqtt_message_received_handler - DROPPING MESSAGE") + + mqtt_client.on_connect = on_connect + mqtt_client.on_disconnect = on_disconnect + mqtt_client.on_subscribe = on_subscribe + mqtt_client.on_unsubscribe = on_unsubscribe + mqtt_client.on_publish = on_publish + mqtt_client.on_message = on_message + mqtt_client.reconnect_delay_set(7200) + logger.debug("Created MQTT protocol client, assigned callbacks") + return mqtt_client + + def _cleanup_transport_on_error(self): + """ + After disconnecting because of an error, Paho was designed to keep the loop running and + to try reconnecting after the reconnect interval. 
We don't want Paho to reconnect because + we want to control the timing of the reconnect, so we force the loop to stop. + + We are relying on intimite knowledge of Paho behavior here. If this becomes a problem, + it may be necessary to write our own Paho thread and stop using thread_start()/thread_stop(). + This is certainly supported by Paho, but the thread that Paho provides works well enough + (so far) and making our own would be more complex than is currently justified. + """ + logger.info("Forcing paho disconnect to prevent it from automatically reconnecting") + self._mqtt_client.disconnect() + self._mqtt_client.loop_stop() + if threading.current_thread() == self._mqtt_client._thread: + logger.debug("in paho thread. nulling _thread") + self._mqtt_client._thread = None + logger.debug("Done forcing paho disconnect") + + def _create_ssl_context(self): + """ + This method creates the SSLContext object used by Paho to authenticate the connection. + """ + logger.debug("creating a SSL context") + ssl_context = ssl.SSLContext(protocol=(ssl.PROTOCOL_TLSv1_2)) + if self._server_verification_cert: + logger.debug("configuring SSL context with custom server verification cert") + ssl_context.load_verify_locations(cadata=(self._server_verification_cert)) + else: + logger.debug("configuring SSL context with default certs") + ssl_context.load_default_certs() + if self._cipher: + try: + logger.debug("configuring SSL context with cipher suites") + ssl_context.set_ciphers(self._cipher) + except ssl.SSLError as e: + try: + raise e + finally: + e = None + del e + + if self._x509_cert is not None: + logger.debug("configuring SSL context with client-side certificate and key") + ssl_context.load_cert_chain(self._x509_cert.certificate_file, self._x509_cert.key_file, self._x509_cert.pass_phrase) + ssl_context.verify_mode = ssl.CERT_REQUIRED + ssl_context.check_hostname = True + return ssl_context + + def connect(self, password=None): + """ + Connect to the MQTT broker, using hostname 
and username set at instantiation. + + This method should be called as an entry point before sending any telemetry. + + The password is not required if the transport was instantiated with an x509 certificate. + + If MQTT connection has been proxied, connection will take a bit longer to allow negotiation + with the proxy server. Any errors in the proxy connection process will trigger exceptions + + :param str password: The password for connecting with the MQTT broker (Optional). + + :raises: ConnectionFailedError if connection could not be established. + :raises: ConnectionDroppedError if connection is dropped during execution. + :raises: UnauthorizedError if there is an error authenticating. + :raises: ProtocolClientError if there is some other client error. + """ + logger.debug("connecting to mqtt broker") + self._mqtt_client.username_pw_set(username=(self._username), password=password) + try: + if self._websockets: + logger.info("Connect using port 443 (websockets)") + rc = self._mqtt_client.connect(host=(self._hostname), + port=443, + keepalive=(self._keep_alive)) + else: + logger.info("Connect using port 8883 (TCP)") + rc = self._mqtt_client.connect(host=(self._hostname), + port=8883, + keepalive=(self._keep_alive)) + except socket.error as e: + try: + self._cleanup_transport_on_error() + if isinstance(e, ssl.SSLError) and e.strerror is not None and "CERTIFICATE_VERIFY_FAILED" in e.strerror: + raise exceptions.TlsExchangeAuthError(cause=e) + else: + if isinstance(e, socks.ProxyError): + if isinstance(e, socks.SOCKS5AuthError): + raise exceptions.UnauthorizedError(cause=e) + else: + raise exceptions.ProtocolProxyError(cause=e) + else: + raise exceptions.ConnectionFailedError(cause=e) + finally: + e = None + del e + + except socks.ProxyError as pe: + try: + self._cleanup_transport_on_error() + if isinstance(pe, socks.SOCKS5AuthError): + raise exceptions.UnauthorizedError(cause=pe) + else: + raise exceptions.ProtocolProxyError(cause=pe) + finally: + pe = None + 
del pe + + except Exception as e: + try: + self._cleanup_transport_on_error() + raise exceptions.ProtocolClientError(message="Unexpected Paho failure during connect", + cause=e) + finally: + e = None + del e + + logger.debug("_mqtt_client.connect returned rc={}".format(rc)) + if rc: + raise _create_error_from_rc_code(rc) + self._mqtt_client.loop_start() + + def disconnect(self): + """ + Disconnect from the MQTT broker. + + :raises: ProtocolClientError if there is some client error. + """ + logger.info("disconnecting MQTT client") + try: + try: + rc = self._mqtt_client.disconnect() + except Exception as e: + try: + raise exceptions.ProtocolClientError(message="Unexpected Paho failure during disconnect", + cause=e) + finally: + e = None + del e + + finally: + self._mqtt_client.loop_stop() + if threading.current_thread() == self._mqtt_client._thread: + logger.debug("in paho thread. nulling _thread") + self._mqtt_client._thread = None + + logger.debug("_mqtt_client.disconnect returned rc={}".format(rc)) + if rc: + err = _create_error_from_rc_code(rc) + raise err + + def subscribe(self, topic, qos=1, callback=None): + """ + This method subscribes the client to one topic from the MQTT broker. + + :param str topic: a single string specifying the subscription topic to subscribe to + :param int qos: the desired quality of service level for the subscription. Defaults to 1. + :param callback: A callback to be triggered upon completion (Optional). + + :return: message ID for the subscribe request. + + :raises: ValueError if qos is not 0, 1 or 2. + :raises: ValueError if topic is None or has zero string length. + :raises: ConnectionDroppedError if connection is dropped during execution. + :raises: ProtocolClientError if there is some other client error. 
+ """ + logger.info("subscribing to {} with qos {}".format(topic, qos)) + try: + rc, mid = self._mqtt_client.subscribe(topic, qos=qos) + except ValueError: + raise + except Exception as e: + try: + raise exceptions.ProtocolClientError(message="Unexpected Paho failure during subscribe", + cause=e) + finally: + e = None + del e + + logger.debug("_mqtt_client.subscribe returned rc={}".format(rc)) + if rc: + raise _create_error_from_rc_code(rc) + self._op_manager.establish_operation(mid, callback) + + def unsubscribe(self, topic, callback=None): + """ + Unsubscribe the client from one topic on the MQTT broker. + + :param str topic: a single string which is the subscription topic to unsubscribe from. + :param callback: A callback to be triggered upon completion (Optional). + + :raises: ValueError if topic is None or has zero string length. + :raises: ConnectionDroppedError if connection is dropped during execution. + :raises: ProtocolClientError if there is some other client error. + """ + logger.info("unsubscribing from {}".format(topic)) + try: + rc, mid = self._mqtt_client.unsubscribe(topic) + except ValueError: + raise + except Exception as e: + try: + raise exceptions.ProtocolClientError(message="Unexpected Paho failure during unsubscribe", + cause=e) + finally: + e = None + del e + + logger.debug("_mqtt_client.unsubscribe returned rc={}".format(rc)) + if rc: + raise _create_error_from_rc_code(rc) + self._op_manager.establish_operation(mid, callback) + + def publish(self, topic, payload, qos=1, callback=None): + """ + Send a message via the MQTT broker. + + :param str topic: topic: The topic that the message should be published on. + :param payload: The actual message to send. + :type payload: str, bytes, int, float or None + :param int qos: the desired quality of service level for the subscription. Defaults to 1. + :param callback: A callback to be triggered upon completion (Optional). 
logger = logging.getLogger(__name__)


class OperationManager(object):
    """Tracks pending operations and their associated callbacks until completion."""

    def __init__(self):
        # mid -> callback for operations still awaiting a broker response.
        self._pending_operation_callbacks = {}
        # mid -> mid for responses that arrived before the operation was established.
        self._unknown_operation_completions = {}
        self._lock = threading.Lock()

    def establish_operation(self, mid, callback=None):
        """Establish a pending operation identified by MID, and store its completion callback.

        If the operation has already been completed, the callback will be triggered.
        """
        completed_early = False
        with self._lock:
            if mid in self._unknown_operation_completions:
                # The broker's response beat us here; consume the early completion.
                del self._unknown_operation_completions[mid]
                completed_early = True
            else:
                self._pending_operation_callbacks[mid] = callback
                logger.debug("Waiting for response on MID: {}".format(mid))
        # Callback runs outside the lock so it may safely re-enter this manager.
        if not completed_early:
            return
        logger.debug("Response for MID: {} was received early - triggering callback".format(mid))
        if not callback:
            logger.debug("No callback for MID: {}".format(mid))
            return
        try:
            callback()
        except Exception:
            logger.error("Unexpected error calling callback for MID: {}".format(mid))
            logger.error(traceback.format_exc())

    def complete_operation(self, mid):
        """Complete an operation identified by MID and trigger the associated completion callback.

        If the operation MID is unknown, the completion status will be stored until
        the operation is established.
        """
        # Sentinel distinguishes "no pending entry" from a pending entry whose
        # callback is legitimately None.
        missing = object()
        with self._lock:
            callback = self._pending_operation_callbacks.pop(mid, missing)
            if callback is missing:
                logger.debug("Response received for unknown MID: {}".format(mid))
                self._unknown_operation_completions[mid] = mid
        if callback is missing:
            return
        logger.debug("Response received for recognized MID: {} - triggering callback".format(mid))
        if not callback:
            logger.debug("No callback set for MID: {}".format(mid))
            return
        try:
            callback()
        except Exception:
            logger.error("Unexpected error calling callback for MID: {}".format(mid))
            logger.error(traceback.format_exc())
import logging, six, abc
from azure.iot.device import constant

logger = logging.getLogger(__name__)

# Default MQTT keep-alive interval, in seconds.
DEFAULT_KEEPALIVE = 60


@six.add_metaclass(abc.ABCMeta)
class BasePipelineConfig(object):
    """A base class for storing all configurations/options shared across the Azure IoT Python
    Device Client Library. More specific configurations such as those that only apply to the
    IoT Hub Client will be found in the respective config files.
    """

    def __init__(self, websockets=False, cipher="", proxy_options=None, keep_alive=None):
        """Initializer for BasePipelineConfig.

        NOTE(review): the decompiler emitted "Parse error at or near `LOAD_FAST'" in place of
        this method; the body below is reconstructed from the upstream azure-iot-device v2
        sources — confirm against the deployed bytecode before relying on it.

        :param bool websockets: Enable MQTT-over-websockets transport (default False).
        :param cipher: Cipher suite(s) in OpenSSL cipher-list format, as str or list.
        :param proxy_options: Options for sending traffic through proxy servers.
        :param int keep_alive: Keep-alive interval in seconds; falls back to
            DEFAULT_KEEPALIVE when not provided.
        """
        self.websockets = websockets
        self.cipher = self._sanitize_cipher(cipher)
        self.proxy_options = proxy_options
        self.keep_alive = self._validate_keep_alive(keep_alive) if keep_alive else DEFAULT_KEEPALIVE

    @staticmethod
    def _sanitize_cipher(cipher):
        """Sanitize the cipher input and convert to a string in OpenSSL list format.

        :param cipher: Cipher suite(s) as a list of strings or a single string.
        :returns: Upper-cased, dash-separated cipher string.
        :raises: TypeError if cipher is neither a list nor a string.
        """
        if isinstance(cipher, list):
            cipher = ":".join(cipher)
        elif isinstance(cipher, str):
            # Normalize to OpenSSL convention: upper case, dashes not underscores.
            cipher = cipher.upper()
            cipher = cipher.replace("_", "-")
        else:
            raise TypeError("Invalid type for 'cipher'")
        return cipher

    @staticmethod
    def _validate_keep_alive(keep_alive):
        """Validate and normalize the keep-alive value to an int.

        :param keep_alive: Candidate keep-alive interval in seconds.
        :returns: keep_alive as an int.
        :raises: ValueError if not coercible to int, non-positive, or above
            constant.MAX_KEEP_ALIVE_SECS.
        """
        try:
            keep_alive = int(keep_alive)
        except (ValueError, TypeError):
            raise ValueError("Invalid type for 'keep alive'. Permissible types are integer.")

        if keep_alive <= 0 or keep_alive > constant.MAX_KEEP_ALIVE_SECS:
            raise ValueError("'keep alive' can not be zero OR negative AND can not be more than 29 minutes. It is recommended to choose 'keep alive' around 60 secs.")
        return keep_alive
class ResponseEvent(PipelineEvent):
    """
    A PipelineEvent object which is the second part of an RequestAndResponseOperation operation
    (the response). The RequestAndResponseOperation represents the common operation of sending
    a request to iothub with a request_id ($rid) value and waiting for a response with
    the same $rid value. This convention is used by both Twin and Provisioning features.

    The response represented by this event has not yet been matched to the corresponding
    RequestOperation operation. That matching is done by the CoordinateRequestAndResponseStage
    stage which takes the contents of this event and puts it into the RequestAndResponseOperation
    operation with the matching $rid value.

    :ivar request_id: The request ID which will eventually be used to match a RequestOperation
        operation to this event.
    :type request_id: str
    :ivar status_code: The status code returned by the response. Any value under 300 is
        considered success.
    :type status_code: int
    :ivar response_body: The body of the response.
    :type response_body: str
    :ivar retry_after: A retry interval value that was extracted from the topic.
    :type retry_after: int
    """

    def __init__(self, request_id, status_code, response_body, retry_after=None):
        # Delegate to the base class so .name is populated from the class name.
        super(ResponseEvent, self).__init__()
        self.request_id = request_id
        self.status_code = status_code
        self.response_body = response_body
        self.retry_after = retry_after


class ConnectedEvent(PipelineEvent):
    """
    A PipelineEvent object indicating a connection has been established.
    """


class DisconnectedEvent(PipelineEvent):
    """
    A PipelineEvent object indicating a connection has been dropped.
    """
"""This module defines exceptions that may be raised from a pipeline"""
from azure.iot.device.common.chainable_exception import ChainableException


class PipelineException(ChainableException):
    """Generic pipeline exception"""


class OperationCancelled(PipelineException):
    """Operation was cancelled"""


class OperationError(PipelineException):
    """Error while executing an Operation"""


class PipelineTimeoutError(PipelineException):
    """Pipeline operation timed out"""


class PipelineError(PipelineException):
    """Error caused by incorrect pipeline configuration"""
/var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/common/pipeline/pipeline_ops_base.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 19917 bytes +import sys, logging, traceback +from . import pipeline_exceptions +from . import pipeline_thread +from azure.iot.device.common import handle_exceptions +logger = logging.getLogger(__name__) + +class PipelineOperation(object): + __doc__ = '\n A base class for data objects representing operations that travels down the pipeline.\n\n Each PipelineOperation object represents a single asyncroneous operation that is performed\n by the pipeline. The PipelineOperation objects travel through "stages" of the pipeline,\n and each stage has the opportunity to act on each specific operation that it\n receives. If a stage does not handle a particular operation, it needs to pass it to the\n next stage. If the operation gets to the end of the pipeline without being handled\n (completed), then it is treated as an error.\n\n :ivar name: The name of the operation. This is used primarily for logging\n :type name: str\n :ivar callback: The callback that is called when the operation is completed, either\n successfully or with a failure.\n :type callback: Function\n :ivar needs_connection: This is an attribute that indicates whether a particular operation\n requires a connection to operate. This is currently used by the AutoConnectStage\n stage, but this functionality will be revamped shortly.\n :type needs_connection: Boolean\n :ivar error: The presence of a value in the error attribute indicates that the operation failed,\n absence of this value indicates that the operation either succeeded or hasn\'t been handled yet.\n :type error: Error\n ' + + def __init__(self, callback): + """ + Initializer for PipelineOperation objects. + + :param Function callback: The function that gets called when this operation is complete or has + failed. 
The callback function must accept A PipelineOperation object which indicates + the specific operation which has completed or failed. + """ + if self.__class__ == PipelineOperation: + raise TypeError("Cannot instantiate PipelineOperation object. You need to use a derived class") + self.name = self.__class__.__name__ + self.callback_stack = [] + self.needs_connection = False + self.completed = False + self.completing = False + self.error = None + self.add_callback(callback) + + def add_callback(self, callback): + """Adds a callback to the Operation that will be triggered upon Operation completion. + + When an Operation is completed, all callbacks will be resolved in LIFO order. + + Callbacks cannot be added to an already completed operation, or an operation that is + currently undergoing a completion process. + + :param callback: The callback to add to the operation. + + :raises: OperationError if the operation is already completed, or is in the process of + completing. + """ + if self.completed: + raise pipeline_exceptions.OperationError("{}: Attempting to add a callback to an already-completed operation!".format(self.name)) + elif self.completing: + raise pipeline_exceptions.OperationError("{}: Attempting to add a callback to a operation with completion in progress!".format(self.name)) + else: + self.callback_stack.append(callback) + + @pipeline_thread.runs_on_pipeline_thread + def complete(self, error=None): + """ Complete the operation, and trigger all callbacks in LIFO order. + + The operation is completed successfully be default, or completed unsucessfully if an error + is provided. + + An operation that is already fully completed, or in the process of completion cannot be + completed again. + + This process can be halted if a callback for the operation invokes the .halt_completion() + method on this Operation. + + :param error: Optionally provide an Exception object indicating the error that caused + the completion. 
Providing an error indicates that the operation was unsuccessful. + """ + if error: + logger.debug("{}: completing with error {}".format(self.name, error)) + else: + logger.debug("{}: completing without error".format(self.name)) + if self.completed or self.completing: + e = pipeline_exceptions.OperationError("Attempting to complete an already-completed operation: {}".format(self.name)) + handle_exceptions.handle_background_exception(e) + else: + self.completing = True + self.error = error + while self.callback_stack: + if not self.completing: + logger.debug("{}: Completion halted!".format(self.name)) + break + if self.completed: + e = pipeline_exceptions.OperationError("Operation reached fully completed state while still resolving completion: {}".format(self.name)) + handle_exceptions.handle_background_exception(e) + break + callback = self.callback_stack.pop() + try: + callback(op=self, error=error) + except Exception as e: + try: + logger.warning("Unhandled error while triggering callback for {}".format(self.name)) + handle_exceptions.handle_background_exception(e) + finally: + e = None + del e + + if self.completing: + self.completing = False + self.completed = True + + @pipeline_thread.runs_on_pipeline_thread + def halt_completion(self): + """Halt the completion of an operation that is currently undergoing a completion process + as a result of a call to .complete(). + + Completion cannot be halted if there is no currently ongoing completion process. The only + way to successfully invoke this method is from within a callback on the Operation in + question. + + This method will leave any yet-untriggered callbacks on the Operation to be triggered upon + a later completion. + + This method will clear any error associated with the currently ongoing completion process + from the Operation.
+ """ + if not self.completing: + e = pipeline_exceptions.OperationError("Attempting to halt completion of an operation not in the process of completion: {}".format(self.name)) + handle_exceptions.handle_background_exception(e) + else: + logger.debug("{}: Halting completion...".format(self.name)) + self.completing = False + self.error = None + + @pipeline_thread.runs_on_pipeline_thread + def spawn_worker_op(self, worker_op_type, **kwargs): + """Create and return a new operation, which, when completed, will complete the operation + it was spawned from. + + :param worker_op_type: The type (class) of the new worker operation. + :param **kwargs: The arguments to instantiate the new worker operation with. Note that a + callback is not required, but if provided, will be triggered prior to completing the + operation that spawned the worker operation. + + :returns: A new worker operation of the type specified in the worker_op_type parameter. + """ + logger.debug("{}: creating worker op of type {}".format(self.name, worker_op_type.__name__)) + + @pipeline_thread.runs_on_pipeline_thread + def on_worker_op_complete(op, error): + logger.debug("{}: Worker op ({}) has been completed".format(self.name, op.name)) + self.complete(error=error) + + if "callback" in kwargs: + provided_callback = kwargs["callback"] + kwargs["callback"] = on_worker_op_complete + worker_op = worker_op_type(**kwargs) + worker_op.add_callback(provided_callback) + else: + kwargs["callback"] = on_worker_op_complete + worker_op = worker_op_type(**kwargs) + return worker_op + + +class InitializePipelineOperation(PipelineOperation): + __doc__ = "\n A PipelineOperation for doing initial setup of the pipeline\n\n Attributes can be dynamically added to this operation for use in other stages if necessary\n (e.g. 
initialization requires a derived value)\n " + + +class ConnectOperation(PipelineOperation): + __doc__ = "\n A PipelineOperation object which tells the pipeline to connect to whatever service it needs to connect to.\n\n This operation is in the group of base operations because connecting is a common operation that many clients might need to do.\n\n Even though this is an base operation, it will most likely be handled by a more specific stage (such as an IoTHub or MQTT stage).\n " + + def __init__(self, callback): + self.watchdog_timer = None + super(ConnectOperation, self).__init__(callback) + + +class ReauthorizeConnectionOperation(PipelineOperation): + __doc__ = "\n A PipelineOperation object which tells the pipeline to reauthorize the connection to whatever service it is connected to.\n\n Clients will most-likely submit a ReauthorizeConnectionOperation when some credential (such as a sas token) has changed and the protocol client\n needs to re-establish the connection to refresh the credentials\n\n This operation is in the group of base operations because reauthorizinging is a common operation that many clients might need to do.\n\n Even though this is an base operation, it will most likely be handled by a more specific stage (such as an IoTHub or MQTT stage).\n " + + def __init__(self, callback): + self.watchdog_timer = None + super(ReauthorizeConnectionOperation, self).__init__(callback) + + +class DisconnectOperation(PipelineOperation): + __doc__ = "\n A PipelineOperation object which tells the pipeline to disconnect from whatever service it might be connected to.\n\n This operation is in the group of base operations because disconnecting is a common operation that many clients might need to do.\n\n Even though this is an base operation, it will most likely be handled by a more specific stage (such as an IoTHub or MQTT stage).\n " + + +class EnableFeatureOperation(PipelineOperation): + __doc__ = '\n A PipelineOperation object which tells the pipeline to 
"enable" a particular feature.\n\n A "feature" is just a string which represents some set of functionality that needs to be enabled, such as "C2D" or "Twin".\n\n This object has no notion of what it means to "enable" a feature. That knowledge is handled by stages in the pipeline which might convert\n this operation to a more specific operation (such as an MQTT subscribe operation with a specific topic name).\n\n This operation is in the group of base operations because disconnecting is a common operation that many clients might need to do.\n\n Even though this is an base operation, it will most likely be handled by a more specific stage (such as an IoTHub or MQTT stage).\n ' + + def __init__(self, feature_name, callback): + """ + Initializer for EnableFeatureOperation objects. + + :param str feature_name: Name of the feature that is being enabled. The meaning of this + string is defined in the stage which handles this operation. + :param Function callback: The function that gets called when this operation is complete or has + failed. The callback function must accept A PipelineOperation object which indicates + the specific operation which has completed or failed. + """ + super(EnableFeatureOperation, self).__init__(callback=callback) + self.feature_name = feature_name + + +class DisableFeatureOperation(PipelineOperation): + __doc__ = '\n A PipelineOperation object which tells the pipeline to "disable" a particular feature.\n\n A "feature" is just a string which represents some set of functionality that needs to be disabled, such as "C2D" or "Twin".\n\n This object has no notion of what it means to "disable" a feature. 
That knowledge is handled by stages in the pipeline which might convert\n this operation to a more specific operation (such as an MQTT unsubscribe operation with a specific topic name).\n\n This operation is in the group of base operations because disconnecting is a common operation that many clients might need to do.\n\n Even though this is an base operation, it will most likely be handled by a more specific stage (such as an IoTHub or MQTT stage).\n ' + + def __init__(self, feature_name, callback): + """ + Initializer for DisableFeatureOperation objects. + + :param str feature_name: Name of the feature that is being disabled. The meaning of this + string is defined in the stage which handles this operation. + :param Function callback: The function that gets called when this operation is complete or has + failed. The callback function must accept A PipelineOperation object which indicates + the specific operation which has completed or failed. + """ + super(DisableFeatureOperation, self).__init__(callback=callback) + self.feature_name = feature_name + + +class RequestAndResponseOperation(PipelineOperation): + __doc__ = "\n A PipelineOperation object which wraps the common operation of sending a request to iothub with a request_id ($rid)\n value and waiting for a response with the same $rid value. This convention is used by both Twin and Provisioning\n features.\n\n Even though this is an base operation, it will most likely be generated and also handled by more specifics stages\n (such as IoTHub or MQTT stages).\n\n The type of the request payload and the response payload is undefined at this level. The type of the payload is defined\n based on the type of request that is being executed. If types need to be converted, that is the responsibility of\n the stage which creates this operation, and also the stage which executes on the operation.\n\n :ivar status_code: The status code returned by the response. 
Any value under 300 is considered success.\n :type status_code: int\n :ivar response_body: The body of the response.\n :type response_body: Undefined\n :ivar query_params: Any query parameters that need to be sent with the request.\n Example is the id of the operation as returned by the initial provisioning request.\n " + + def __init__(self, request_type, method, resource_location, request_body, callback, query_params=None): + """ + Initializer for RequestAndResponseOperation objects + + :param str request_type: The type of request. This is a string which is used by protocol-specific stages to + generate the actual request. For example, if request_type is "twin", then the iothub_mqtt stage will convert + the request into an MQTT publish with topic that begins with $iothub/twin + :param str method: The method for the request, in the REST sense of the word, such as "POST", "GET", etc. + :param str resource_location: The resource that the method is acting on, in the REST sense of the word. + For twin request with method "GET", this is most likely the string "/" which retrieves the entire twin + :param request_body: The body of the request. This is a required field, and a single space can be used to denote + an empty body. + :type request_body: Undefined + :param Function callback: The function that gets called when this operation is complete or has + failed. The callback function must accept A PipelineOperation object which indicates + the specific operation which has completed or failed. + """ + super(RequestAndResponseOperation, self).__init__(callback=callback) + self.request_type = request_type + self.method = method + self.resource_location = resource_location + self.request_body = request_body + self.status_code = None + self.response_body = None + self.query_params = query_params + + +class RequestOperation(PipelineOperation): + __doc__ = "\n A PipelineOperation object which is the first part of an RequestAndResponseOperation operation (the request). 
The second\n part of the RequestAndResponseOperation operation (the response) is returned via an ResponseEvent event.\n\n Even though this is an base operation, it will most likely be generated and also handled by more specifics stages\n (such as IoTHub or MQTT stages).\n " + + def __init__(self, request_type, method, resource_location, request_body, request_id, callback, query_params=None): + """ + Initializer for RequestOperation objects + + :param str request_type: The type of request. This is a string which is used by protocol-specific stages to + generate the actual request. For example, if request_type is "twin", then the iothub_mqtt stage will convert + the request into an MQTT publish with topic that begins with $iothub/twin + :param str method: The method for the request, in the REST sense of the word, such as "POST", "GET", etc. + :param str resource_location: The resource that the method is acting on, in the REST sense of the word. + For twin request with method "GET", this is most likely the string "/" which retrieves the entire twin + :param request_body: The body of the request. This is a required field, and a single space can be used to denote + an empty body. + :type request_body: dict, str, int, float, bool, or None (JSON compatible values) + :param Function callback: The function that gets called when this operation is complete or has + failed. The callback function must accept A PipelineOperation object which indicates + the specific operation which has completed or failed. + :type query_params: Any query parameters that need to be sent with the request. + Example is the id of the operation as returned by the initial provisioning request. 
+ """ + super(RequestOperation, self).__init__(callback=callback) + self.method = method + self.resource_location = resource_location + self.request_type = request_type + self.request_body = request_body + self.request_id = request_id + self.query_params = query_params diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_ops_http.py b/APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_ops_http.py new file mode 100644 index 0000000..8a8afff --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_ops_http.py @@ -0,0 +1,33 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/common/pipeline/pipeline_ops_http.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 1765 bytes +from . import PipelineOperation + +class HTTPRequestAndResponseOperation(PipelineOperation): + __doc__ = "\n A PipelineOperation object which contains arguments used to connect to a server using the HTTP protocol.\n\n This operation is in the group of HTTP operations because its attributes are very specific to the HTTP protocol.\n " + + def __init__(self, method, path, headers, body, query_params, callback): + """ + Initializer for HTTPPublishOperation objects. + :param str method: The HTTP method used in the request + :param str path: The path to be used in the request url + :param dict headers: The headers to be used in the HTTP request + :param str body: The body to be provided with the HTTP request + :param str query_params: The query parameters to be used in the request url + :param Function callback: The function that gets called when this operation is complete or has failed. + The callback function must accept A PipelineOperation object which indicates the specific operation which + has completed or failed. 
+ """ + super(HTTPRequestAndResponseOperation, self).__init__(callback=callback) + self.method = method + self.path = path + self.headers = headers + self.body = body + self.query_params = query_params + self.status_code = None + self.response_body = None + self.reason = None diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_ops_mqtt.py b/APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_ops_mqtt.py new file mode 100644 index 0000000..7c89e7d --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_ops_mqtt.py @@ -0,0 +1,65 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/common/pipeline/pipeline_ops_mqtt.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 3316 bytes +from . import PipelineOperation + +class MQTTPublishOperation(PipelineOperation): + __doc__ = "\n A PipelineOperation object which contains arguments used to publish a specific payload on a specific topic using the MQTT protocol.\n\n This operation is in the group of MQTT operations because its attributes are very specific to the MQTT protocol.\n " + + def __init__(self, topic, payload, callback): + """ + Initializer for MQTTPublishOperation objects. + + :param str topic: The name of the topic to publish to + :param str payload: The payload to publish + :param Function callback: The function that gets called when this operation is complete or has failed. + The callback function must accept A PipelineOperation object which indicates the specific operation which + has completed or failed. 
+ """ + super(MQTTPublishOperation, self).__init__(callback=callback) + self.topic = topic + self.payload = payload + self.needs_connection = True + self.retry_timer = None + + +class MQTTSubscribeOperation(PipelineOperation): + __doc__ = "\n A PipelineOperation object which contains arguments used to subscribe to a specific MQTT topic using the MQTT protocol.\n\n This operation is in the group of MQTT operations because its attributes are very specific to the MQTT protocol.\n " + + def __init__(self, topic, callback): + """ + Initializer for MQTTSubscribeOperation objects. + + :param str topic: The name of the topic to subscribe to + :param Function callback: The function that gets called when this operation is complete or has failed. + The callback function must accept A PipelineOperation object which indicates the specific operation which + has completed or failed. + """ + super(MQTTSubscribeOperation, self).__init__(callback=callback) + self.topic = topic + self.needs_connection = True + self.timeout_timer = None + self.retry_timer = None + + +class MQTTUnsubscribeOperation(PipelineOperation): + __doc__ = "\n A PipelineOperation object which contains arguments used to unsubscribe from a specific MQTT topic using the MQTT protocol.\n\n This operation is in the group of MQTT operations because its attributes are very specific to the MQTT protocol.\n " + + def __init__(self, topic, callback): + """ + Initializer for MQTTUnsubscribeOperation objects. + + :param str topic: The name of the topic to unsubscribe from + :param Function callback: The function that gets called when this operation is complete or has failed. + The callback function must accept A PipelineOperation object which indicates the specific operation which + has completed or failed. 
+ """ + super(MQTTUnsubscribeOperation, self).__init__(callback=callback) + self.topic = topic + self.needs_connection = True + self.timeout_timer = None + self.retry_timer = None diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_stages_base.py b/APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_stages_base.py new file mode 100644 index 0000000..5f8dc61 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_stages_base.py @@ -0,0 +1,685 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/common/pipeline/pipeline_stages_base.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 53129 bytes +import logging, abc, six, sys, time, traceback, uuid, weakref, threading +from six.moves import queue +from . import pipeline_events_base +from . import pipeline_ops_base, pipeline_ops_mqtt +from . import pipeline_thread +from . import pipeline_exceptions +from azure.iot.device.common import handle_exceptions, transport_exceptions +from azure.iot.device.common.auth import sastoken as st +from azure.iot.device.common.callable_weak_method import CallableWeakMethod +logger = logging.getLogger(__name__) + +@six.add_metaclass(abc.ABCMeta) +class PipelineStage(object): + __doc__ = '\n Base class representing a stage in the processing pipeline. Each stage is responsible for receiving\n PipelineOperation objects from the top, possibly processing them, and possibly passing them down. It\n is also responsible for receiving PipelineEvent objects from the bottom, possibly processing them, and\n possibly passing them up.\n\n Each PipelineStage in the pipeline, is expected to act on some well-defined set of PipelineOperation\n types and/or some set of PipelineEvent types. 
If any stage does not act on an operation or event, it\n should pass it to the next stage (for operations) or the previous stage (for events). In this way, the\n pipeline implements the "chain of responsibility" design pattern (Gamma, et.al. "Design Patterns".\n Addison Wesley. 1995), with each stage being responsible for implementing some "rule" or "policy" of the\n pipeline, and each stage being ignorant of the stages that are before or after it in the pipeline.\n\n Each stage in the pipeline should act on the smallest set of rules possible, thus making stages small\n and easily testable. Complex logic should be the exception and not the rule, and complex stages should\n operate on the most generic type of operation possible, thus allowing us to re-use complex logic for\n multiple cases. The best way to do this is with "converter" stages that convert a specific operation to\n a more general one and with other converter stages that convert general operations to more specific ones.\n\n An example of a specific-to-generic stage is UseSkAuthProviderStage which takes a specific operation\n (use an auth provider) and converts it into something more generic (here is your device_id, etc, and use\n this SAS token when connecting).\n\n An example of a generic-to-specific stage is IoTHubMQTTTranslationStage which converts IoTHub operations\n (such as SendD2CMessageOperation) to MQTT operations (such as Publish).\n\n Each stage should also work in the broadest domain possible. For example a generic stage (say\n "AutoConnectStage") that initiates a connection if any arbitrary operation needs a connection is more useful\n than having some MQTT-specific code that re-connects to the MQTT broker if the user calls Publish and\n there\'s no connection.\n\n One way to think about stages is to look at every "block of functionality" in your code and ask yourself\n "is this the one and only time I will need this code"? 
If the answer is no, it might be worthwhile to\n implement that code in it\'s own stage in a very generic way.\n\n\n :ivar name: The name of the stage. This is used primarily for logging\n :type name: str\n :ivar next: The next stage in the pipeline. Set to None if this is the last stage in the pipeline.\n :type next: PipelineStage\n :ivar previous: The previous stage in the pipeline. Set to None if this is the first stage in the pipeline.\n :type previous: PipelineStage\n :ivar pipeline_root: The first stage (root) of the pipeline. This is useful if a stage wants to\n submit an operation to the pipeline starting at the root. This type of behavior is uncommon but not\n unexpected.\n :type pipeline_root: PipelineStage\n ' + + def __init__(self): + """ + Initializer for PipelineStage objects. + """ + self.name = self.__class__.__name__ + self.next = None + self.previous = None + self.pipeline_root = None + + @pipeline_thread.runs_on_pipeline_thread + def run_op(self, op): + """ + Run the given operation. This is the public function that outside callers would call to run an + operation. Derived classes should override the private _run_op function to implement + stage-specific behavior. When run_op returns, that doesn't mean that the operation has executed + to completion. Rather, it means that the pipeline has done something that will cause the + operation to eventually execute to completion. That might mean that something was sent over + the network and some stage is waiting for a reply, or it might mean that the operation is sitting + in a queue until something happens, or it could mean something entirely different. The only + thing you can assume is that the operation will _eventually_ complete successfully or fail, and the + operation's callback will be called when that happens. + + :param PipelineOperation op: The operation to run. 
+ """ + try: + self._run_op(op) + except Exception as e: + try: + logger.warning(msg=("Unexpected error in {}._run_op() call".format(self))) + logger.warning(traceback.format_exc()) + op.complete(error=e) + finally: + e = None + del e + + @pipeline_thread.runs_on_pipeline_thread + def _run_op(self, op): + """ + Implementation of the stage-specific function of .run_op(). Override this method instead of + .run_op() in child classes in order to change how a stage behaves when running an operation. + + See the description of the .run_op() method for more discussion on what it means to "run" + an operation. + + :param PipelineOperation op: The operation to run. + """ + self.send_op_down(op) + + @pipeline_thread.runs_on_pipeline_thread + def handle_pipeline_event(self, event): + """ + Handle a pipeline event that arrives from the stage below this stage. Derived + classes should not override this function. Any stage-specific handling of + PipelineEvent objects should be implemented by overriding the private + _handle_pipeline_event function in the derived stage. + + :param PipelineEvent event: The event that is being passed back up the pipeline + """ + try: + self._handle_pipeline_event(event) + except Exception as e: + try: + logger.error(msg=("Unexpected error in {}._handle_pipeline_event() call".format(self))) + handle_exceptions.handle_background_exception(e) + finally: + e = None + del e + + @pipeline_thread.runs_on_pipeline_thread + def _handle_pipeline_event(self, event): + """ + Handle a pipeline event that arrives from the stage below this stage. 
This + is a function that is intended to be overridden in any stages that want to implement + stage-specific handling of any events + + :param PipelineEvent event: The event that is being passed back up the pipeline + """ + self.send_event_up(event) + + @pipeline_thread.runs_on_pipeline_thread + def send_op_down(self, op): + """ + Helper function to continue a given operation by passing it to the next stage + in the pipeline. If there is no next stage in the pipeline, this function + will fail the operation and call complete_op to return the failure back up the + pipeline. + + :param PipelineOperation op: Operation which is being passed on + """ + if not self.next: + logger.error("{}({}): no next stage. completing with error".format(self.name, op.name)) + error = pipeline_exceptions.PipelineError("{} not handled after {} stage with no next stage".format(op.name, self.name)) + op.complete(error=error) + else: + self.next.run_op(op) + + @pipeline_thread.runs_on_pipeline_thread + def send_event_up(self, event): + """ + Helper function to pass an event to the previous stage of the pipeline. This is the default + behavior of events while traveling through the pipeline. They start somewhere (maybe the + bottom) and move up the pipeline until they're handled or until they error out. + """ + if self.previous: + self.previous.handle_pipeline_event(event) + else: + error = pipeline_exceptions.PipelineError("{} unhandled at {} stage with no previous stage".format(event.name, self.name)) + handle_exceptions.handle_background_exception(error) + + +class PipelineRootStage(PipelineStage): + __doc__ = '\n Object representing the root of a pipeline. This is where the functions to build\n the pipeline exist. This is also where clients can add event handlers to receive\n events from the pipeline.\n\n :ivar on_pipeline_event_handler: Handler which can be set by users of the pipeline to\n receive PipelineEvent objects. 
This is how users receive any "unsolicited"\n events from the pipeline (such as C2D messages). This function is called with\n a PipelineEvent object every time any such event occurs.\n :type on_pipeline_event_handler: Function\n :ivar on_connected_handler: Handler which can be set by users of the pipeline to\n receive events every time the underlying transport connects\n :type on_connected_handler: Function\n :ivar on_disconnected_handler: Handler which can be set by users of the pipeline to\n receive events every time the underlying transport disconnects\n :type on_disconnected_handler: Function\n ' + + def __init__(self, pipeline_configuration): + super(PipelineRootStage, self).__init__() + self.on_pipeline_event_handler = None + self.on_connected_handler = None + self.on_disconnected_handler = None + self.connected = False + self.pipeline_configuration = pipeline_configuration + + def run_op(self, op): + op.callback_stack[0] = pipeline_thread.invoke_on_callback_thread_nowait(op.callback_stack[0]) + pipeline_thread.invoke_on_pipeline_thread(super(PipelineRootStage, self).run_op)(op) + + def append_stage(self, new_stage): + """ + Add the next stage to the end of the pipeline. This is the function that callers + use to build the pipeline by appending stages. This function returns the root of + the pipeline so that calls to this function can be chained together. + + :param PipelineStage new_stage: Stage to add to the end of the pipeline + :returns: The root of the pipeline. + """ + old_tail = self + while old_tail.next: + old_tail = old_tail.next + + old_tail.next = new_stage + new_stage.previous = old_tail + new_stage.pipeline_root = self + return self + + @pipeline_thread.runs_on_pipeline_thread + def _handle_pipeline_event(self, event): + """ + Override of the PipelineEvent handler. Because this is the root of the pipeline, + this function calls the on_pipeline_event_handler to pass the event to the + caller. 
+ + :param PipelineEvent event: Event to be handled, i.e. returned to the caller + through the handle_pipeline_event (if provided). + """ + if isinstance(event, pipeline_events_base.ConnectedEvent): + logger.debug("{}: ConnectedEvent received. Calling on_connected_handler".format(self.name)) + self.connected = True + if self.on_connected_handler: + pipeline_thread.invoke_on_callback_thread_nowait(self.on_connected_handler)() + elif isinstance(event, pipeline_events_base.DisconnectedEvent): + logger.debug("{}: DisconnectedEvent received. Calling on_disconnected_handler".format(self.name)) + self.connected = False + if self.on_disconnected_handler: + pipeline_thread.invoke_on_callback_thread_nowait(self.on_disconnected_handler)() + elif self.on_pipeline_event_handler: + pipeline_thread.invoke_on_callback_thread_nowait(self.on_pipeline_event_handler)(event) + else: + logger.error("incoming {} event with no handler. dropping.".format(event.name)) + + +class SasTokenRenewalStage(PipelineStage): + DEFAULT_TOKEN_RENEWAL_MARGIN = 120 + + def __init__(self): + super(SasTokenRenewalStage, self).__init__() + self._token_renewal_timer = None + + @pipeline_thread.runs_on_pipeline_thread + def _run_op(self, op): + if isinstance(op, pipeline_ops_base.InitializePipelineOperation) and isinstance(self.pipeline_root.pipeline_configuration.sastoken, st.RenewableSasToken): + self._start_renewal_timer() + self.send_op_down(op) + else: + self.send_op_down(op) + + @pipeline_thread.runs_on_pipeline_thread + def _cancel_token_renewal_timer(self): + """Cancel and delete any pending renewal timer""" + timer = self._token_renewal_timer + self._token_renewal_timer = None + if timer: + logger.debug("Cancelling SAS Token renewal timer") + timer.cancel() + + @pipeline_thread.runs_on_pipeline_thread + def _start_renewal_timer(self): + """Begin a renewal timer. 
+ When the timer expires, and the token is renewed, a new timer will be set""" + self._cancel_token_renewal_timer() + seconds_until_renewal = self.pipeline_root.pipeline_configuration.sastoken.ttl - self.DEFAULT_TOKEN_RENEWAL_MARGIN + if seconds_until_renewal < 0: + handle_exceptions.handle_background_exception(pipeline_exceptions.PipelineError("SasToken TTL less than Renewal Margin!")) + else: + logger.debug("Scheduling SAS Token renewal for {} seconds in the future".format(seconds_until_renewal)) + self_weakref = weakref.ref(self) + + @pipeline_thread.runs_on_pipeline_thread + def on_reauthorize_complete(op, error): + this = self_weakref() + if error: + logger.info("{}({}): reauthorize connection operation failed. Error={}".format(this.name, op.name, error)) + handle_exceptions.handle_background_exception(error) + else: + logger.info("{}({}): reauthorize connection operation is complete".format(this.name, op.name)) + + @pipeline_thread.invoke_on_pipeline_thread_nowait + def renew_token(): + this = self_weakref() + logger.info("Renewing SAS Token") + sastoken = this.pipeline_root.pipeline_configuration.sastoken + sastoken.refresh() + if this.pipeline_root.connected: + this.send_op_down(pipeline_ops_base.ReauthorizeConnectionOperation(callback=on_reauthorize_complete)) + this._start_renewal_timer() + + self._token_renewal_timer = threading.Timer(seconds_until_renewal, renew_token) + self._token_renewal_timer.daemon = True + self._token_renewal_timer.start() + + +class AutoConnectStage(PipelineStage): + __doc__ = "\n This stage is responsible for ensuring that the protocol is connected when\n it needs to be connected.\n " + + @pipeline_thread.runs_on_pipeline_thread + def _run_op(self, op): + if op.needs_connection: + if self.pipeline_root.connected: + + @pipeline_thread.runs_on_pipeline_thread + def check_for_connection_failure(op, error): + if error: + if not self.pipeline_root.connected: + logger.debug("{}({}): op failed with {} and we're not conencted. 
Re-submitting.".format(self.name, op.name, error)) + op.halt_completion() + self.run_op(op) + + op.add_callback(check_for_connection_failure) + logger.debug("{}({}): Connected. Sending down and adding callback to check result".format(self.name, op.name)) + self.send_op_down(op) + else: + logger.debug("{}({}): Op needs connection. Queueing this op and starting a ConnectionOperation".format(self.name, op.name)) + self._do_connect(op) + else: + self.send_op_down(op) + + @pipeline_thread.runs_on_pipeline_thread + def _do_connect(self, op): + """ + Start connecting the transport in response to some operation + """ + op_needs_complete = op + + @pipeline_thread.runs_on_pipeline_thread + def on_connect_op_complete(op, error): + if error: + logger.debug("{}({}): Connection failed. Completing with failure because of connection failure: {}".format(self.name, op_needs_complete.name, error)) + op_needs_complete.complete(error=error) + else: + logger.debug("{}({}): connection is complete. Running op that triggered connection.".format(self.name, op_needs_complete.name)) + self.run_op(op_needs_complete) + + logger.debug("{}({}): calling down with Connect operation".format(self.name, op.name)) + self.send_op_down(pipeline_ops_base.ConnectOperation(callback=on_connect_op_complete)) + + +class ConnectionLockStage(PipelineStage): + __doc__ = '\n This stage is responsible for serializing connect, disconnect, and reauthorize ops on\n the pipeline, such that only a single one of these ops can go past this stage at a\n time. This way, we don\'t have to worry about cases like "what happens if we try to\n disconnect if we\'re in the middle of reauthorizing." 
This stage will wait for the\n reauthorize to complete before letting the disconnect past.\n ' + + def __init__(self): + super(ConnectionLockStage, self).__init__() + self.queue = queue.Queue() + self.blocked = False + + @pipeline_thread.runs_on_pipeline_thread + def _run_op(self, op): + if self.blocked: + logger.debug("{}({}): pipeline is blocked waiting for a prior connect/disconnect/reauthorize to complete. queueing.".format(self.name, op.name)) + self.queue.put_nowait(op) + else: + if isinstance(op, pipeline_ops_base.ConnectOperation) and self.pipeline_root.connected: + logger.info("{}({}): Transport is already connected. Completing.".format(self.name, op.name)) + op.complete() + else: + if isinstance(op, pipeline_ops_base.DisconnectOperation): + self.pipeline_root.connected or logger.info("{}({}): Transport is already disconnected. Completing.".format(self.name, op.name)) + op.complete() + else: + if not isinstance(op, pipeline_ops_base.DisconnectOperation): + if isinstance(op, pipeline_ops_base.ConnectOperation) or isinstance(op, pipeline_ops_base.ReauthorizeConnectionOperation): + self._block(op) + + @pipeline_thread.runs_on_pipeline_thread + def on_operation_complete(op, error): + if error: + logger.debug("{}({}): op failed. Unblocking queue with error: {}".format(self.name, op.name, error)) + else: + logger.debug("{}({}): op succeeded. Unblocking queue".format(self.name, op.name)) + self._unblock(op, error) + + op.add_callback(on_operation_complete) + self.send_op_down(op) + else: + self.send_op_down(op) + + @pipeline_thread.runs_on_pipeline_thread + def _block(self, op): + """ + block this stage while we're waiting for the connect/disconnect/reauthorize operation to complete. + """ + logger.debug("{}({}): blocking".format(self.name, op.name)) + self.blocked = True + + @pipeline_thread.runs_on_pipeline_thread + def _unblock(self, op, error): + """ + Unblock this stage after the connect/disconnect/reauthorize operation is complete. 
This also means + releasing all the operations that were queued up. + """ + logger.debug("{}({}): unblocking and releasing queued ops.".format(self.name, op.name)) + self.blocked = False + logger.debug("{}({}): processing {} items in queue for error={}".format(self.name, op.name, self.queue.qsize(), error)) + old_queue = self.queue + self.queue = queue.Queue() + while not old_queue.empty(): + op_to_release = old_queue.get_nowait() + if error: + logger.debug("{}({}): failing {} op because of error".format(self.name, op.name, op_to_release.name)) + op_to_release.complete(error=error) + else: + logger.debug("{}({}): releasing {} op.".format(self.name, op.name, op_to_release.name)) + self.run_op(op_to_release) + + +class CoordinateRequestAndResponseStage(PipelineStage): + __doc__ = "\n Pipeline stage which is responsible for coordinating RequestAndResponseOperation operations. For each\n RequestAndResponseOperation operation, this stage passes down a RequestOperation operation and waits for\n an ResponseEvent event. 
All other events are passed down unmodified.\n " + + def __init__(self): + super(CoordinateRequestAndResponseStage, self).__init__() + self.pending_responses = {} + + @pipeline_thread.runs_on_pipeline_thread + def _run_op(self, op): + if isinstance(op, pipeline_ops_base.RequestAndResponseOperation): + request_id = str(uuid.uuid4()) + logger.debug("{}({}): adding request {} to pending list".format(self.name, op.name, request_id)) + self.pending_responses[request_id] = op + self._send_request_down(request_id, op) + else: + self.send_op_down(op) + + @pipeline_thread.runs_on_pipeline_thread + def _send_request_down(self, request_id, op): + op_waiting_for_response = op + + @pipeline_thread.runs_on_pipeline_thread + def on_send_request_doneParse error at or near `COME_FROM' instruction at offset 84_0 + + logger.debug("{}({}): Sending {} request to {} resource {}".format(self.name, op.name, op.request_type, op.method, op.resource_location)) + new_op = pipeline_ops_base.RequestOperation(method=(op.method), + resource_location=(op.resource_location), + request_body=(op.request_body), + request_id=request_id, + request_type=(op.request_type), + callback=on_send_request_done, + query_params=(op.query_params)) + self.send_op_down(new_op) + + @pipeline_thread.runs_on_pipeline_thread + def _handle_pipeline_event(self, event): + if isinstance(event, pipeline_events_base.ResponseEvent): + logger.debug("{}({}): Handling event with request_id {}".format(self.name, event.name, event.request_id)) + if event.request_id in self.pending_responses: + op = self.pending_responses[event.request_id] + del self.pending_responses[event.request_id] + op.status_code = event.status_code + op.response_body = event.response_body + op.retry_after = event.retry_after + logger.debug("{}({}): Completing {} request to {} resource {} with status {}".format(self.name, op.name, op.request_type, op.method, op.resource_location, op.status_code)) + op.complete() + else: + logger.info("{}({}): request_id {} not 
found in pending list. Nothing to do. Dropping".format(self.name, event.name, event.request_id)) + else: + if isinstance(event, pipeline_events_base.ConnectedEvent): + self.send_event_up(event) + for request_id in self.pending_responses: + logger.info("{stage}: ConnectedEvent: re-publishing request {id} for {method} {type} ".format(stage=(self.name), + id=request_id, + method=(self.pending_responses[request_id].method), + type=(self.pending_responses[request_id].request_type))) + self._send_request_down(request_id, self.pending_responses[request_id]) + + else: + self.send_event_up(event) + + +class OpTimeoutStage(PipelineStage): + __doc__ = '\n The purpose of the timeout stage is to add timeout errors to select operations\n\n The timeout_intervals attribute contains a list of operations to track along with\n their timeout values. Right now this list is hard-coded but the operations and\n intervals will eventually become a parameter.\n\n For each operation that needs a timeout check, this stage will add a timer to\n the operation. If the timer elapses, this stage will fail the operation with\n a PipelineTimeoutError. The intention is that a higher stage will know what to\n do with that error and act accordingly (either return the error to the user or\n retry).\n\n This stage currently assumes that all timed out operation are just "lost".\n It does not attempt to cancel the operation, as Paho doesn\'t have a way to\n cancel an operation, and with QOS=1, sending a pub or sub twice is not\n catastrophic.\n\n Also, as a long-term plan, the operations that need to be watched for timeout\n will become an initialization parameter for this stage so that differet\n instances of this stage can watch for timeouts on different operations.\n This will be done because we want a lower-level timeout stage which can watch\n for timeouts at the MQTT level, and we want a higher-level timeout stage which\n can watch for timeouts at the iothub level. 
In this way, an MQTT operation that\n times out can be retried as an MQTT operation and a higher-level IoTHub operation\n which times out can be retried as an IoTHub operation (which might necessitate\n redoing multiple MQTT operations).\n ' + + def __init__(self): + super(OpTimeoutStage, self).__init__() + self.timeout_intervals = {(pipeline_ops_mqtt.MQTTSubscribeOperation): 10, + (pipeline_ops_mqtt.MQTTUnsubscribeOperation): 10} + + @pipeline_thread.runs_on_pipeline_thread + def _run_op(self, op): + if type(op) in self.timeout_intervals: + self_weakref = weakref.ref(self) + + @pipeline_thread.invoke_on_pipeline_thread_nowait + def on_timeout(): + this = self_weakref() + logger.info("{}({}): returning timeout error".format(this.name, op.name)) + op.complete(error=(pipeline_exceptions.PipelineTimeoutError("operation timed out before protocol client could respond"))) + + logger.debug("{}({}): Creating timer".format(self.name, op.name)) + op.timeout_timer = threading.Timer(self.timeout_intervals[type(op)], on_timeout) + op.timeout_timer.start() + op.add_callback(self._clear_timer) + logger.debug("{}({}): Sending down".format(self.name, op.name)) + self.send_op_down(op) + else: + self.send_op_down(op) + + @pipeline_thread.runs_on_pipeline_thread + def _clear_timer(self, op, error): + if op.timeout_timer: + logger.debug("{}({}): Cancelling timer".format(self.name, op.name)) + op.timeout_timer.cancel() + op.timeout_timer = None + + +class RetryStage(PipelineStage): + __doc__ = '\n The purpose of the retry stage is to watch specific operations for specific\n errors and retry the operations as appropriate.\n\n Unlike the OpTimeoutStage, this stage will never need to worry about cancelling\n failed operations. 
When an operation is retried at this stage, it is already\n considered "failed", so no cancellation needs to be done.\n ' + + def __init__(self): + super(RetryStage, self).__init__() + self.retry_intervals = {(pipeline_ops_mqtt.MQTTSubscribeOperation): 20, + (pipeline_ops_mqtt.MQTTUnsubscribeOperation): 20, + (pipeline_ops_mqtt.MQTTPublishOperation): 20} + self.ops_waiting_to_retry = [] + + @pipeline_thread.runs_on_pipeline_thread + def _run_op(self, op): + """ + Send all ops down and intercept their return to "watch for retry" + """ + if self._should_watch_for_retry(op): + op.add_callback(self._do_retry_if_necessary) + self.send_op_down(op) + else: + self.send_op_down(op) + + @pipeline_thread.runs_on_pipeline_thread + def _should_watch_for_retry(self, op): + """ + Return True if this op needs to be watched for retry. This can be + called before the op runs. + """ + return type(op) in self.retry_intervals + + @pipeline_thread.runs_on_pipeline_thread + def _should_retry(self, op, error): + """ + Return True if this op needs to be retried. This must be called after + the op completes. + """ + if error: + if self._should_watch_for_retry(op): + if isinstance(error, pipeline_exceptions.PipelineTimeoutError): + return True + return False + + @pipeline_thread.runs_on_pipeline_thread + def _do_retry_if_necessary(self, op, error): + """ + Handler which gets called when operations are complete. This function + is where we check to see if a retry is necessary and set a "retry timer" + which can be used to send the op down again. 
+ """ + if self._should_retry(op, error): + self_weakref = weakref.ref(self) + + @pipeline_thread.invoke_on_pipeline_thread_nowait + def do_retry(): + this = self_weakref() + logger.debug("{}({}): retrying".format(this.name, op.name)) + op.retry_timer.cancel() + op.retry_timer = None + this.ops_waiting_to_retry.remove(op) + this.run_op(op) + + interval = self.retry_intervals[type(op)] + logger.info("{}({}): Op needs retry with interval {} because of {}. Setting timer.".format(self.name, op.name, interval, error)) + op.halt_completion() + self.ops_waiting_to_retry.append(op) + op.retry_timer = threading.Timer(self.retry_intervals[type(op)], do_retry) + op.retry_timer.start() + else: + if op.retry_timer: + op.retry_timer.cancel() + op.retry_timer = None + + +transient_connect_errors = [ + pipeline_exceptions.OperationCancelled, + pipeline_exceptions.PipelineTimeoutError, + pipeline_exceptions.OperationError, + transport_exceptions.ConnectionFailedError, + transport_exceptions.ConnectionDroppedError] + +class ReconnectState(object): + __doc__ = "\n Class which holds reconenct states as class variables. Created to make code that reads like an enum without using an enum.\n\n WAITING_TO_RECONNECT: This stage is in a waiting period before reconnecting. This state implies\n that the user wants the pipeline to be connected. ie. After a successful connection, the\n state will change to LOGICALLY_CONNECTED\n\n LOGICALLY_CONNECTED: The client wants the pipeline to be connected. This state is independent\n of the actual connection state since the pipeline could be logically connected but\n physically disconnected (this is a temporary condition though. If we're logically connected\n and physically disconnected, then we should be waiting to reconnect.\n\n LOGICALLY_DISCONNECTED: The client does not want the pipeline to be connected or the pipeline had\n a permanent errors error and was forced to disconnect. 
If the state is LOGICALLY_DISCONNECTED, then the pipeline\n should be physically disconnected since there is no reason to leave the pipeline connected in this state.\n " + WAITING_TO_RECONNECT = "WAITING_TO_RECONNECT" + LOGICALLY_CONNECTED = "LOGICALLY_CONNECTED" + LOGICALLY_DISCONNECTED = "LOGICALLY_DISCONNECTED" + + +class ReconnectStage(PipelineStage): + + def __init__(self): + super(ReconnectStage, self).__init__() + self.reconnect_timer = None + self.state = ReconnectState.LOGICALLY_DISCONNECTED + self.never_connected = True + self.reconnect_delay = 10 + self.waiting_connect_ops = [] + + @pipeline_thread.runs_on_pipeline_thread + def _run_op(self, op): + if isinstance(op, pipeline_ops_base.ConnectOperation): + if self.state == ReconnectState.WAITING_TO_RECONNECT: + logger.debug("{}({}): State is {}. Adding to wait list".format(self.name, op.name, self.state)) + self.waiting_connect_ops.append(op) + else: + logger.info("{}({}): State changes {}->LOGICALLY_CONNECTED. Adding to wait list and sending new connect op down".format(self.name, op.name, self.state)) + self.state = ReconnectState.LOGICALLY_CONNECTED + self.waiting_connect_ops.append(op) + self._send_new_connect_op_down() + else: + if isinstance(op, pipeline_ops_base.DisconnectOperation): + if self.state == ReconnectState.WAITING_TO_RECONNECT: + logger.info("{}({}): State changes {}->LOGICALLY_DISCONNECTED. Canceling waiting ops and sending disconnect down.".format(self.name, op.name, self.state)) + self.state = ReconnectState.LOGICALLY_DISCONNECTED + self._clear_reconnect_timer() + self._complete_waiting_connect_ops(pipeline_exceptions.OperationCancelled("Explicit disconnect invoked")) + op.complete() + else: + logger.info("{}({}): State changes {}->LOGICALLY_DISCONNECTED. 
Sending op down.".format(self.name, op.name, self.state)) + self.state = ReconnectState.LOGICALLY_DISCONNECTED + self.send_op_down(op) + else: + self.send_op_down(op) + + @pipeline_thread.runs_on_pipeline_thread + def _handle_pipeline_event(self, event): + if isinstance(event, pipeline_events_base.DisconnectedEvent): + logger.debug("{}({}): State is {} Connected is {}.".format(self.name, event.name, self.state, self.pipeline_root.connected)) + if self.pipeline_root.connected and self.state == ReconnectState.LOGICALLY_CONNECTED: + self.state = ReconnectState.WAITING_TO_RECONNECT + self._start_reconnect_timer(0.01) + else: + self.send_event_up(event) + else: + self.send_event_up(event) + + @pipeline_thread.runs_on_pipeline_thread + def _send_new_connect_op_down(self): + self_weakref = weakref.ref(self) + + @pipeline_thread.runs_on_pipeline_thread + def on_connect_complete(op, error): + this = self_weakref() + if this: + logger.debug("{}({}): on_connect_complete error={} state={} never_connected={} connected={} ".format(this.name, op.name, error, this.state, this.never_connected, this.pipeline_root.connected)) + if error: + if this.never_connected: + this.state = ReconnectState.LOGICALLY_DISCONNECTED + this._clear_reconnect_timer() + this._complete_waiting_connect_ops(error) + else: + if type(error) in transient_connect_errors: + self.state = ReconnectState.WAITING_TO_RECONNECT + self._start_reconnect_timer(self.reconnect_delay) + else: + this.state = ReconnectState.LOGICALLY_DISCONNECTED + this._clear_reconnect_timer() + this._complete_waiting_connect_ops(error) + else: + this.never_connected = False + this.state = ReconnectState.LOGICALLY_CONNECTED + this._clear_reconnect_timer() + this._complete_waiting_connect_ops() + + logger.debug("{}: sending new connect op down".format(self.name)) + op = pipeline_ops_base.ConnectOperation(callback=on_connect_complete) + self.send_op_down(op) + + @pipeline_thread.runs_on_pipeline_thread + def _start_reconnect_timer(self, 
delay): + """ + Set a timer to reconnect after some period of time + """ + logger.debug("{}: State is {}. Connected={} Starting reconnect timer".format(self.name, self.state, self.pipeline_root.connected)) + self._clear_reconnect_timer() + self_weakref = weakref.ref(self) + + @pipeline_thread.invoke_on_pipeline_thread_nowait + def on_reconnect_timer_expired(): + this = self_weakref() + logger.debug("{}: Reconnect timer expired. State is {} Connected is {}.".format(self.name, self.state, self.pipeline_root.connected)) + this.reconnect_timer = None + if this.state == ReconnectState.WAITING_TO_RECONNECT: + if not self.pipeline_root.connected: + this.state = ReconnectState.LOGICALLY_CONNECTED + this._send_new_connect_op_down() + + self.reconnect_timer = threading.Timer(delay, on_reconnect_timer_expired) + self.reconnect_timer.start() + + @pipeline_thread.runs_on_pipeline_thread + def _clear_reconnect_timer(self): + """ + Clear any previous reconnect timer + """ + if self.reconnect_timer: + logger.debug("{}: clearing reconnect timer".format(self.name)) + self.reconnect_timer.cancel() + self.reconnect_timer = None + + @pipeline_thread.runs_on_pipeline_thread + def _complete_waiting_connect_ops(self, error=None): + """ + A note of explanation: when we are waiting to reconnect, we need to keep a list of + all connect ops that come through here. We do this for 2 reasons: + + 1. We don't want to pass them down immediately because we want to honor the waiting + period. If we passed them down immediately, we'd try to reconnect immediately + instead of waiting until reconnect_timer fires. + + 2. When we're retrying, there are new ConnectOperation ops sent down regularly. + Any of the ops could be the one that succeeds. When that happens, we need a + way to to complete all of the ops that are patiently waiting for the connection. 
+ + Right now, we only need to do this with ConnectOperation ops because these are the + only ops that need to wait because these are the only ops that cause a connection + to be established. Other ops pass through this stage, and might fail in later + stages, but that's OK. If they needed a connection, the AutoConnectStage before + this stage should be taking care of that. + """ + logger.debug("{}: completing waiting ops with error={}".format(self.name, error)) + list_copy = self.waiting_connect_ops + self.waiting_connect_ops = [] + for op in list_copy: + op.complete(error) \ No newline at end of file diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_stages_http.py b/APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_stages_http.py new file mode 100644 index 0000000..2e25c99 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_stages_http.py @@ -0,0 +1,67 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/common/pipeline/pipeline_stages_http.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 5150 bytes +import logging, six, traceback, copy +from . 
import pipeline_ops_base, PipelineStage, pipeline_ops_http, pipeline_thread, pipeline_exceptions +from azure.iot.device.common.http_transport import HTTPTransport +from azure.iot.device.common import handle_exceptions, transport_exceptions +from azure.iot.device.common.callable_weak_method import CallableWeakMethod +logger = logging.getLogger(__name__) + +class HTTPTransportStage(PipelineStage): + __doc__ = "\n PipelineStage object which is responsible for interfacing with the HTTP protocol wrapper object.\n This stage handles all HTTP operations that are not specific to IoT Hub.\n " + + def __init__(self): + super(HTTPTransportStage, self).__init__() + self.sas_token = None + self.transport = None + + @pipeline_thread.runs_on_pipeline_thread + def _run_op(self, op): + if isinstance(op, pipeline_ops_base.InitializePipelineOperation): + if self.pipeline_root.pipeline_configuration.gateway_hostname: + logger.debug("Gateway Hostname Present. Setting Hostname to: {}".format(self.pipeline_root.pipeline_configuration.gateway_hostname)) + hostname = self.pipeline_root.pipeline_configuration.gateway_hostname + else: + logger.debug("Gateway Hostname not present. 
Setting Hostname to: {}".format(self.pipeline_root.pipeline_configuration.hostname)) + hostname = self.pipeline_root.pipeline_configuration.hostname + logger.debug("{}({}): got connection args".format(self.name, op.name)) + self.transport = HTTPTransport(hostname=hostname, + server_verification_cert=(self.pipeline_root.pipeline_configuration.server_verification_cert), + x509_cert=(self.pipeline_root.pipeline_configuration.x509), + cipher=(self.pipeline_root.pipeline_configuration.cipher)) + self.pipeline_root.transport = self.transport + op.complete() + else: + if isinstance(op, pipeline_ops_http.HTTPRequestAndResponseOperation): + logger.debug("{}({}): Generating HTTP request and setting callback before completing.".format(self.name, op.name)) + + @pipeline_thread.invoke_on_pipeline_thread_nowait + def on_request_completed(error=None, response=None): + if error: + logger.debug("{}({}): Error passed to on_request_completed. Error={}".format(self.name, op.name, error)) + op.complete(error=error) + else: + logger.debug("{}({}): Request completed. 
Completing op.".format(self.name, op.name)) + logger.debug("HTTP Response Status: {}".format(response["status_code"])) + logger.debug("HTTP Response: {}".format(response["resp"].decode("utf-8"))) + op.response_body = response["resp"] + op.status_code = response["status_code"] + op.reason = response["reason"] + op.complete() + + http_headers = copy.deepcopy(op.headers) + if self.pipeline_root.pipeline_configuration.sastoken: + http_headers["Authorization"] = str(self.pipeline_root.pipeline_configuration.sastoken) + self.transport.request(method=(op.method), + path=(op.path), + headers=http_headers, + query_params=(op.query_params), + body=(op.body), + callback=on_request_completed) + else: + self.send_op_down(op) diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_stages_mqtt.py b/APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_stages_mqtt.py new file mode 100644 index 0000000..5d34acb --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_stages_mqtt.py @@ -0,0 +1,151 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/common/pipeline/pipeline_stages_mqtt.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 17042 bytes +import logging, six, traceback, threading, weakref +from . 
import pipeline_ops_base, PipelineStage, pipeline_ops_mqtt, pipeline_events_mqtt, pipeline_thread, pipeline_exceptions, pipeline_events_base +from azure.iot.device.common.mqtt_transport import MQTTTransport +from azure.iot.device.common import handle_exceptions, transport_exceptions +from azure.iot.device.common.callable_weak_method import CallableWeakMethod +logger = logging.getLogger(__name__) +WATCHDOG_INTERVAL = 10 + +class MQTTTransportStage(PipelineStage): + __doc__ = "\n PipelineStage object which is responsible for interfacing with the MQTT protocol wrapper object.\n This stage handles all MQTT operations and any other operations (such as ConnectOperation) which\n is not in the MQTT group of operations, but can only be run at the protocol level.\n " + + def __init__(self): + super(MQTTTransportStage, self).__init__() + self.transport = None + self._pending_connection_op = None + + @pipeline_thread.runs_on_pipeline_thread + def _cancel_pending_connection_op(self, error=None): + """ + Cancel any running connect, disconnect or reauthorize_connection op. Since our ability to "cancel" is fairly limited, + all this does (for now) is to fail the operation + """ + op = self._pending_connection_op + if op: + if not error: + error = pipeline_exceptions.OperationCancelled("Cancelling because new ConnectOperation, DisconnectOperation, or ReauthorizeConnectionOperation was issued") + self._cancel_connection_watchdog(op) + op.complete(error=error) + self._pending_connection_op = None + + @pipeline_thread.runs_on_pipeline_thread + def _start_connection_watchdog(self, connection_op): + logger.debug("{}({}): Starting watchdog".format(self.name, connection_op.name)) + self_weakref = weakref.ref(self) + op_weakref = weakref.ref(connection_op) + + @pipeline_thread.invoke_on_pipeline_thread + def watchdog_function(): + this = self_weakref() + op = op_weakref() + if this: + if op: + if this._pending_connection_op is op: + logger.info("{}({}): Connection watchdog expired. 
Cancelling op".format(this.name, op.name)) + this.transport.disconnect() + if this.pipeline_root.connected: + logger.info("{}({}): Pipeline is still connected on watchdog expiration. Sending DisconnectedEvent".format(this.name, op.name)) + this.send_event_up(pipeline_events_base.DisconnectedEvent()) + this._cancel_pending_connection_op(error=(pipeline_exceptions.OperationCancelled("Transport timeout on connection operation"))) + + connection_op.watchdog_timer = threading.Timer(WATCHDOG_INTERVAL, watchdog_function) + connection_op.watchdog_timer.daemon = True + connection_op.watchdog_timer.start() + + @pipeline_thread.runs_on_pipeline_thread + def _cancel_connection_watchdog(self, op): + try: + if op.watchdog_timer: + logger.debug("{}({}): cancelling watchdog".format(self.name, op.name)) + op.watchdog_timer.cancel() + op.watchdog_timer = None + except AttributeError: + pass + + @pipeline_thread.runs_on_pipeline_thread + def _run_opParse error at or near `COME_FROM' instruction at offset 920_0 + + @pipeline_thread.invoke_on_pipeline_thread_nowait + def _on_mqtt_message_received(self, topic, payload): + """ + Handler that gets called by the protocol library when an incoming message arrives. + Convert that message into a pipeline event and pass it up for someone to handle. + """ + logger.debug("{}: message received on topic {}".format(self.name, topic)) + self.send_event_up(pipeline_events_mqtt.IncomingMQTTMessageEvent(topic=topic, payload=payload)) + + @pipeline_thread.invoke_on_pipeline_thread_nowait + def _on_mqtt_connected(self): + """ + Handler that gets called by the transport when it connects. 
+ """ + logger.info("_on_mqtt_connected called") + self.send_event_up(pipeline_events_base.ConnectedEvent()) + if isinstance(self._pending_connection_op, pipeline_ops_base.ConnectOperation): + logger.debug("completing connect op") + op = self._pending_connection_op + self._cancel_connection_watchdog(op) + self._pending_connection_op = None + op.complete() + else: + logger.info("Connection was unexpected") + + @pipeline_thread.invoke_on_pipeline_thread_nowait + def _on_mqtt_connection_failure(self, cause): + """ + Handler that gets called by the transport when a connection fails. + + :param Exception cause: The Exception that caused the connection failure. + """ + logger.info("{}: _on_mqtt_connection_failure called: {}".format(self.name, cause)) + if isinstance(self._pending_connection_op, pipeline_ops_base.ConnectOperation): + logger.debug("{}: failing connect op".format(self.name)) + op = self._pending_connection_op + self._cancel_connection_watchdog(op) + self._pending_connection_op = None + op.complete(error=cause) + else: + logger.info("{}: Connection failure was unexpected".format(self.name)) + handle_exceptions.swallow_unraised_exception(cause, + log_msg="Unexpected connection failure. Safe to ignore.", log_lvl="info") + + @pipeline_thread.invoke_on_pipeline_thread_nowait + def _on_mqtt_disconnected(self, cause=None): + """ + Handler that gets called by the transport when the transport disconnects. 
+ + :param Exception cause: The Exception that caused the disconnection, if any (optional) + """ + if cause: + logger.info("{}: _on_mqtt_disconnect called: {}".format(self.name, cause)) + else: + logger.info("{}: _on_mqtt_disconnect called".format(self.name)) + self.send_event_up(pipeline_events_base.DisconnectedEvent()) + if self._pending_connection_op: + logger.debug("{}: completing pending {} op".format(self.name, self._pending_connection_op.name)) + op = self._pending_connection_op + self._cancel_connection_watchdog(op) + self._pending_connection_op = None + if isinstance(op, pipeline_ops_base.DisconnectOperation) or isinstance(op, pipeline_ops_base.ReauthorizeConnectionOperation): + if cause: + handle_exceptions.swallow_unraised_exception(cause, + log_msg="Unexpected disconnect with error while disconnecting - swallowing error") + op.complete() + else: + if cause: + op.complete(error=cause) + else: + op.complete(error=(transport_exceptions.ConnectionDroppedError("transport disconnected"))) + else: + logger.info("{}: disconnection was unexpected".format(self.name)) + e = transport_exceptions.ConnectionDroppedError(cause=cause) + handle_exceptions.swallow_unraised_exception(e, + log_msg="Unexpected disconnection. 
Safe to ignore since other stages will reconnect.", + log_lvl="info") \ No newline at end of file diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_thread.py b/APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_thread.py new file mode 100644 index 0000000..539de8b --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/common/pipeline/pipeline_thread.py @@ -0,0 +1,134 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/common/pipeline/pipeline_thread.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 9542 bytes +import functools, logging, threading, traceback +from multiprocessing.pool import ThreadPool +from concurrent.futures import ThreadPoolExecutor +from azure.iot.device.common import handle_exceptions +logger = logging.getLogger(__name__) +_executors = {} + +def _get_named_executor(thread_name): + """ + Get a ThreadPoolExecutor object with the given name. If no such executor exists, + this function will create on with a single worker and assign it to the provided + name. + """ + global _executors + if thread_name not in _executors: + logger.debug("Creating {} executor".format(thread_name)) + _executors[thread_name] = ThreadPoolExecutor(max_workers=1) + return _executors[thread_name] + + +def _invoke_on_executor_thread(func, thread_name, block=True): + """ + Return wrapper to run the function on a given thread. If block==False, + the call returns immediately without waiting for the decorated function to complete. + If block==True, the call waits for the decorated function to complete before returning. 
+ """ + try: + function_name = func.__name__ + function_has_name = True + except AttributeError: + function_name = str(func) + function_has_name = False + + def wrapper(*args, **kwargs): + if threading.current_thread().name is not thread_name: + logger.debug("Starting {} in {} thread".format(function_name, thread_name)) + + def thread_proc(): + threading.current_thread().name = thread_name + try: + return func(*args, **kwargs) + except Exception as e: + try: + if not block: + handle_exceptions.handle_background_exception(e) + else: + raise + finally: + e = None + del e + + except BaseException: + if not block: + logger.critical("Unhandled exception in background thread") + logger.critical("This may cause the background thread to abort and may result in system instability.") + traceback.print_exc() + raise + + future = _get_named_executor(thread_name).submit(thread_proc) + if block: + return future.result() + return future + else: + logger.debug("Already in {} thread for {}".format(thread_name, function_name)) + return func(*args, **kwargs) + + if function_has_name: + return functools.update_wrapper(wrapped=func, wrapper=wrapper) + wrapper.__wrapped__ = func + return wrapper + + +def invoke_on_pipeline_thread(func): + """ + Run the decorated function on the pipeline thread. 
+ """ + return _invoke_on_executor_thread(func=func, thread_name="pipeline") + + +def invoke_on_pipeline_thread_nowait(func): + """ + Run the decorated function on the pipeline thread, but don't wait for it to complete + """ + return _invoke_on_executor_thread(func=func, thread_name="pipeline", block=False) + + +def invoke_on_callback_thread_nowait(func): + """ + Run the decorated function on the callback thread, but don't wait for it to complete + """ + return _invoke_on_executor_thread(func=func, thread_name="callback", block=False) + + +def invoke_on_http_thread_nowait(func): + """ + Run the decorated function on the callback thread, but don't wait for it to complete + """ + return _invoke_on_executor_thread(func=func, thread_name="azure_iot_http", block=False) + + +def _assert_executor_thread(func, thread_name): + """ + Decorator which asserts that the given function only gets called inside the given + thread. + """ + + @functools.wraps(func) + def wrapper(*args, **kwargs): + assert threading.current_thread().name == thread_name, "\n Function {function_name} is not running inside {thread_name} thread.\n It should be. You should use invoke_on_{thread_name}_thread(_nowait) to enter the\n {thread_name} thread before calling this function. If you're hitting this from\n inside a test function, you may need to add the fake_pipeline_thread fixture to\n your test. (generally applied on the global pytestmark in a module) ".format(function_name=(func.__name__), + thread_name=thread_name) + return func(*args, **kwargs) + + return wrapper + + +def runs_on_pipeline_thread(func): + """ + Decorator which marks a function as only running inside the pipeline thread. + """ + return _assert_executor_thread(func=func, thread_name="pipeline") + + +def runs_on_http_thread(func): + """ + Decorator which marks a function as only running inside the http thread. 
+ """ + return _assert_executor_thread(func=func, thread_name="azure_iot_http") diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/common/transport_exceptions.py b/APPS_UNCOMPILED/lib/azure/iot/device/common/transport_exceptions.py new file mode 100644 index 0000000..b47a5a9 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/common/transport_exceptions.py @@ -0,0 +1,32 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/common/transport_exceptions.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 1275 bytes +"""This module defines errors that may be raised from a transport""" +from .chainable_exception import ChainableException + +class ConnectionFailedError(ChainableException): + __doc__ = "\n Connection failed to be established\n " + + +class ConnectionDroppedError(ChainableException): + __doc__ = "\n Previously established connection was dropped\n " + + +class UnauthorizedError(ChainableException): + __doc__ = "\n Authorization was rejected\n " + + +class ProtocolClientError(ChainableException): + __doc__ = "\n Error returned from protocol client library\n " + + +class TlsExchangeAuthError(ChainableException): + __doc__ = "\n Error returned when transport layer exchanges\n result in a SSLCertVerification error.\n " + + +class ProtocolProxyError(ChainableException): + __doc__ = "\n All proxy-related errors.\n TODO : Not sure what to name it here. 
There is a class called Proxy Error already in Pysocks\n " diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/common/version_compat.py b/APPS_UNCOMPILED/lib/azure/iot/device/common/version_compat.py new file mode 100644 index 0000000..3691eb1 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/common/version_compat.py @@ -0,0 +1,31 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/common/version_compat.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 1561 bytes +"""This module defines functions intended for providing compatibility between +different versions of Python""" +from six.moves import urllib + +def urlencode(query, quote_via=urllib.parse.quote_plus, safe=""): + """ Custom implementation of urllib.parse.urlencode(). + + This is necessary because prior to Python 3.5, urlencode() always encodes via + quote_plus() rather than quote(). This is generally not desirable for MQTT, as + it will translate ' ' into '+' rather than '%20', and '+' is not allowed in the + topic strings for MQTT publish. + + Starting in 3.5, the included library function for urlencode() allows you to specify + which style of encoding you want, however this feature is not available in 2.7 and so + we must implement it ourselves. 
+ """ + if isinstance(query, list): + encoded = "&".join(["{}={}".format(quote_via(k, safe=safe), quote_via(v, safe=safe)) for k, v in query]) + else: + if isinstance(query, dict): + encoded = "&".join(["{}={}".format(quote_via(k, safe=safe), quote_via(v, safe=safe)) for k, v in query.items()]) + else: + raise TypeError("Invalid type for 'query'") + return encoded diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/constant.py b/APPS_UNCOMPILED/lib/azure/iot/device/constant.py new file mode 100644 index 0000000..b39e4af --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/constant.py @@ -0,0 +1,20 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/constant.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 1015 bytes +"""This module defines constants for use across the azure-iot-device package +""" +VERSION = "2.4.0" +IOTHUB_IDENTIFIER = "azure-iot-device-iothub-py" +PROVISIONING_IDENTIFIER = "azure-iot-device-provisioning-py" +IOTHUB_API_VERSION = "2019-10-01" +PROVISIONING_API_VERSION = "2019-03-31" +SECURITY_MESSAGE_INTERFACE_ID = "urn:azureiot:Security:SecurityAgent:1" +TELEMETRY_MESSAGE_SIZE_LIMIT = 262144 +MAX_KEEP_ALIVE_SECS = 1740 +DIGITAL_TWIN_PREFIX = "dtmi" +DIGITAL_TWIN_API_VERSION = "2020-09-30" +DIGITAL_TWIN_QUERY_HEADER = "model-id" diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/exceptions.py b/APPS_UNCOMPILED/lib/azure/iot/device/exceptions.py new file mode 100644 index 0000000..5cc20c7 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/exceptions.py @@ -0,0 +1,32 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: 
/var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/exceptions.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 3770 bytes +"""This module defines an exception surface, exposed as part of the azure.iot.device library API""" +from azure.iot.device.common.chainable_exception import ChainableException + +class OperationCancelled(ChainableException): + __doc__ = "An operation was cancelled" + + +class ClientError(ChainableException): + __doc__ = "Generic error for a client" + + +class ConnectionFailedError(ClientError): + __doc__ = "Failed to establish a connection" + + +class ConnectionDroppedError(ClientError): + __doc__ = "Lost connection while executing operation" + + +class CredentialError(ClientError): + __doc__ = "Could not connect client using given credentials" + + +class ServiceError(ChainableException): + __doc__ = "Error received from an Azure IoT service" diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/__init__.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/__init__.py new file mode 100644 index 0000000..c7e224a --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/__init__.py @@ -0,0 +1,16 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/__init__.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 364 bytes +"""Azure IoT Hub Device Library + +This library provides functionality for communicating with the Azure IoT Hub +as a Device or Module. 
+""" +from .sync_clients import IoTHubDeviceClient, IoTHubModuleClient +from .models import Message, MethodRequest, MethodResponse +__all__ = [ + 'IoTHubDeviceClient', 'IoTHubModuleClient', 'Message', 'MethodRequest', 'MethodResponse'] diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/abstract_clients.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/abstract_clients.py new file mode 100644 index 0000000..3c89698 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/abstract_clients.py @@ -0,0 +1,621 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/abstract_clients.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 32494 bytes +"""This module contains abstract classes for the various clients of the Azure IoT Hub Device SDK +""" +import six, abc, logging, threading, os, io, time +from . import pipeline +from azure.iot.device.common.auth import connection_string as cs +from azure.iot.device.common.auth import sastoken as st +from azure.iot.device import exceptions +from azure.iot.device.common import auth +from . import edge_hsm +logger = logging.getLogger(__name__) + +def _validate_kwargs(exclude=[], **kwargs): + """Helper function to validate user provided kwargs. 
+ Raises TypeError if an invalid option has been provided""" + valid_kwargs = [ + 'product_info', + 'websockets', + 'cipher', + 'server_verification_cert', + 'proxy_options', + 'sastoken_ttl', + 'keep_alive'] + for kwarg in kwargs: + if kwarg not in valid_kwargs or kwarg in exclude: + raise TypeError("Unsupported keyword argument: '{}'".format(kwarg)) + + +def _get_config_kwargs(**kwargs): + """Get the subset of kwargs which pertain the config object""" + valid_config_kwargs = [ + 'product_info', + 'websockets', + 'cipher', + 'server_verification_cert', + 'proxy_options', + 'keep_alive'] + config_kwargs = {} + for kwarg in kwargs: + if kwarg in valid_config_kwargs: + config_kwargs[kwarg] = kwargs[kwarg] + + return config_kwargs + + +def _form_sas_uri(hostname, device_id, module_id=None): + if module_id: + return "{hostname}/devices/{device_id}/modules/{module_id}".format(hostname=hostname, + device_id=device_id, + module_id=module_id) + return "{hostname}/devices/{device_id}".format(hostname=hostname, device_id=device_id) + + +def _extract_sas_uri_values(uri): + d = {} + items = uri.split("/") + if len(items) != 3: + if len(items) != 5: + raise ValueError("Invalid SAS URI") + if items[1] != "devices": + raise ValueError("Cannot extract device id from SAS URI") + if len(items) > 3: + if items[3] != "modules": + raise ValueError("Cannot extract module id from SAS URI") + d["hostname"] = items[0] + d["device_id"] = items[2] + try: + d["module_id"] = items[4] + except IndexError: + d["module_id"] = None + + return d + + +RECEIVE_TYPE_NONE_SET = "none_set" +RECEIVE_TYPE_HANDLER = "handler" +RECEIVE_TYPE_API = "api" + +@six.add_metaclass(abc.ABCMeta) +class AbstractIoTHubClient(object): + __doc__ = " A superclass representing a generic IoTHub client.\n This class needs to be extended for specific clients.\n " + + def __init__(self, mqtt_pipeline, http_pipeline): + """Initializer for a generic client. 
+ + :param mqtt_pipeline: The pipeline used to connect to the IoTHub endpoint. + :type mqtt_pipeline: :class:`azure.iot.device.iothub.pipeline.MQTTPipeline` + """ + self._mqtt_pipeline = mqtt_pipeline + self._http_pipeline = http_pipeline + self._inbox_manager = None + self._handler_manager = None + self._receive_type = RECEIVE_TYPE_NONE_SET + self._client_lock = threading.Lock() + + def _on_connected(self): + """Helper handler that is called upon an iothub pipeline connect""" + logger.info("Connection State - Connected") + self._handler_manager.ensure_running() + + def _on_disconnected(self): + """Helper handler that is called upon an iothub pipeline disconnect""" + logger.info("Connection State - Disconnected") + self._inbox_manager.clear_all_method_requests() + logger.info("Cleared all pending method requests due to disconnect") + + def _check_receive_mode_is_apiParse error at or near `POP_BLOCK' instruction at offset 48 + + def _check_receive_mode_is_handlerParse error at or near `POP_BLOCK' instruction at offset 56 + + def _replace_user_supplied_sastoken(self, sastoken_str): + """ + Replaces the pipeline's NonRenewableSasToken with a new one based on a provided + sastoken string. Also does validation. + This helper only updates the PipelineConfig - it does not reauthorize the connection. 
+ """ + if not isinstance(self._mqtt_pipeline.pipeline_configuration.sastoken, st.NonRenewableSasToken): + raise exceptions.ClientError("Cannot update sastoken when client was not created with one") + else: + try: + new_token_o = st.NonRenewableSasToken(sastoken_str) + except st.SasTokenError as e: + try: + new_err = ValueError("Invalid SasToken provided") + new_err.__cause__ = e + raise new_err + finally: + e = None + del e + + vals = _extract_sas_uri_values(new_token_o.resource_uri) + if type(self).__name__ == "IoTHubDeviceClient": + if vals["module_id"]: + raise ValueError("Provided SasToken is for a module") + if type(self).__name__ == "IoTHubModuleClient" and not vals["module_id"]: + raise ValueError("Provided SasToken is for a device") + if self._mqtt_pipeline.pipeline_configuration.device_id != vals["device_id"]: + raise ValueError("Provided SasToken does not match existing device id") + if self._mqtt_pipeline.pipeline_configuration.module_id != vals["module_id"]: + raise ValueError("Provided SasToken does not match existing module id") + if self._mqtt_pipeline.pipeline_configuration.hostname != vals["hostname"]: + raise ValueError("Provided SasToken does not match existing hostname") + if new_token_o.expiry_time < int(time.time()): + raise ValueError("Provided SasToken has already expired") + self._mqtt_pipeline.pipeline_configuration.sastoken = new_token_o + + @classmethod + def create_from_connection_string(cls, connection_string, **kwargs): + """ + Instantiate the client from a IoTHub device or module connection string. + + :param str connection_string: The connection string for the IoTHub you wish to connect to. + + :param str server_verification_cert: Configuration Option. The trusted certificate chain. + Necessary when using connecting to an endpoint which has a non-standard root of trust, + such as a protocol gateway. + :param bool websockets: Configuration Option. Default is False. Set to true if using MQTT + over websockets. 
+ :param cipher: Configuration Option. Cipher suite(s) for TLS/SSL, as a string in + "OpenSSL cipher list format" or as a list of cipher suite strings. + :type cipher: str or list(str) + :param str product_info: Configuration Option. Default is empty string. The string contains + arbitrary product info which is appended to the user agent string. + :param proxy_options: Options for sending traffic through proxy servers. + :type proxy_options: :class:`azure.iot.device.ProxyOptions` + :param int sastoken_ttl: The time to live (in seconds) for the created SasToken used for + authentication. Default is 3600 seconds (1 hour) + :param int keep_alive: Maximum period in seconds between communications with the + broker. If no other messages are being exchanged, this controls the + rate at which the client will send ping messages to the broker. + If not provided default value of 60 secs will be used. + + :raises: ValueError if given an invalid connection_string. + :raises: TypeError if given an unsupported parameter. + + :returns: An instance of an IoTHub client that uses a connection string for authentication. 
+ """ + _validate_kwargs(**kwargs) + connection_string = cs.ConnectionString(connection_string) + uri = _form_sas_uri(hostname=(connection_string[cs.HOST_NAME]), + device_id=(connection_string[cs.DEVICE_ID]), + module_id=(connection_string.get(cs.MODULE_ID))) + signing_mechanism = auth.SymmetricKeySigningMechanism(key=(connection_string[cs.SHARED_ACCESS_KEY])) + token_ttl = kwargs.get("sastoken_ttl", 3600) + try: + sastoken = st.RenewableSasToken(uri, signing_mechanism, ttl=token_ttl) + except st.SasTokenError as e: + try: + new_err = ValueError("Could not create a SasToken using provided values") + new_err.__cause__ = e + raise new_err + finally: + e = None + del e + + config_kwargs = _get_config_kwargs(**kwargs) + pipeline_configuration = (pipeline.IoTHubPipelineConfig)(device_id=connection_string[cs.DEVICE_ID], + module_id=connection_string.get(cs.MODULE_ID), + hostname=connection_string[cs.HOST_NAME], + gateway_hostname=connection_string.get(cs.GATEWAY_HOST_NAME), + sastoken=sastoken, **config_kwargs) + if cls.__name__ == "IoTHubDeviceClient": + pipeline_configuration.blob_upload = True + http_pipeline = pipeline.HTTPPipeline(pipeline_configuration) + mqtt_pipeline = pipeline.MQTTPipeline(pipeline_configuration) + return cls(mqtt_pipeline, http_pipeline) + + @classmethod + def create_from_sastoken(cls, sastoken, **kwargs): + """Instantiate the client from a pre-created SAS Token string + + :param str sastoken: The SAS Token string + + :param str server_verification_cert: Configuration Option. The trusted certificate chain. + Necessary when using connecting to an endpoint which has a non-standard root of trust, + such as a protocol gateway. + :param bool websockets: Configuration Option. Default is False. Set to true if using MQTT + over websockets. + :param cipher: Configuration Option. Cipher suite(s) for TLS/SSL, as a string in + "OpenSSL cipher list format" or as a list of cipher suite strings. 
+ :type cipher: str or list(str) + :param str product_info: Configuration Option. Default is empty string. The string contains + arbitrary product info which is appended to the user agent string. + :param proxy_options: Options for sending traffic through proxy servers. + :type proxy_options: :class:`azure.iot.device.ProxyOptions` + :param int keep_alive: Maximum period in seconds between communications with the + broker. If no other messages are being exchanged, this controls the + rate at which the client will send ping messages to the broker. + If not provided default value of 60 secs will be used. + + :raises: TypeError if given an unsupported parameter. + :raises: ValueError if the sastoken parameter is invalid. + """ + excluded_kwargs = [ + "sastoken_ttl"] + _validate_kwargs(exclude=excluded_kwargs, **kwargs) + try: + sastoken_o = st.NonRenewableSasToken(sastoken) + except st.SasTokenError as e: + try: + new_err = ValueError("Invalid SasToken provided") + new_err.__cause__ = e + raise new_err + finally: + e = None + del e + + vals = _extract_sas_uri_values(sastoken_o.resource_uri) + if cls.__name__ == "IoTHubDeviceClient": + if vals["module_id"]: + raise ValueError("Provided SasToken is for a module") + if cls.__name__ == "IoTHubModuleClient": + if not vals["module_id"]: + raise ValueError("Provided SasToken is for a device") + if sastoken_o.expiry_time < int(time.time()): + raise ValueError("Provided SasToken has already expired") + config_kwargs = _get_config_kwargs(**kwargs) + pipeline_configuration = (pipeline.IoTHubPipelineConfig)(device_id=vals["device_id"], + module_id=vals["module_id"], + hostname=vals["hostname"], + sastoken=sastoken_o, **config_kwargs) + if cls.__name__ == "IoTHubDeviceClient": + pipeline_configuration.blob_upload = True + http_pipeline = pipeline.HTTPPipeline(pipeline_configuration) + mqtt_pipeline = pipeline.MQTTPipeline(pipeline_configuration) + return cls(mqtt_pipeline, http_pipeline) + + @abc.abstractmethod + def connect(self): 
+ pass + + @abc.abstractmethod + def disconnect(self): + pass + + @abc.abstractmethod + def update_sastoken(self, sastoken): + pass + + @abc.abstractmethod + def send_message(self, message): + pass + + @abc.abstractmethod + def receive_method_request(self, method_name=None): + pass + + @abc.abstractmethod + def send_method_response(self, method_request, payload, status): + pass + + @abc.abstractmethod + def get_twin(self): + pass + + @abc.abstractmethod + def patch_twin_reported_properties(self, reported_properties_patch): + pass + + @abc.abstractmethod + def receive_twin_desired_properties_patch(self): + pass + + @property + def connected(self): + """ + Read-only property to indicate if the transport is connected or not. + """ + return self._mqtt_pipeline.connected + + @abc.abstractproperty + def on_message_received(self): + pass + + @abc.abstractproperty + def on_method_request_received(self): + pass + + @abc.abstractproperty + def on_twin_desired_properties_patch_received(self): + pass + + +@six.add_metaclass(abc.ABCMeta) +class AbstractIoTHubDeviceClient(AbstractIoTHubClient): + + @classmethod + def create_from_x509_certificate(cls, x509, hostname, device_id, **kwargs): + """ + Instantiate a client using X509 certificate authentication. + + :param str hostname: Host running the IotHub. + Can be found in the Azure portal in the Overview tab as the string hostname. + :param x509: The complete x509 certificate object. + To use the certificate the enrollment object needs to contain cert + (either the root certificate or one of the intermediate CA certificates). + If the cert comes from a CER file, it needs to be base64 encoded. + :type x509: :class:`azure.iot.device.X509` + :param str device_id: The ID used to uniquely identify a device in the IoTHub + + :param str server_verification_cert: Configuration Option. The trusted certificate chain. + Necessary when using connecting to an endpoint which has a non-standard root of trust, + such as a protocol gateway. 
+ :param bool websockets: Configuration Option. Default is False. Set to true if using MQTT + over websockets. + :param cipher: Configuration Option. Cipher suite(s) for TLS/SSL, as a string in + "OpenSSL cipher list format" or as a list of cipher suite strings. + :type cipher: str or list(str) + :param str product_info: Configuration Option. Default is empty string. The string contains + arbitrary product info which is appended to the user agent string. + :param proxy_options: Options for sending traffic through proxy servers. + :type proxy_options: :class:`azure.iot.device.ProxyOptions` + :param int keep_alive: Maximum period in seconds between communications with the + broker. If no other messages are being exchanged, this controls the + rate at which the client will send ping messages to the broker. + If not provided default value of 60 secs will be used. + + :raises: TypeError if given an unsupported parameter. + + :returns: An instance of an IoTHub client that uses an X509 certificate for authentication. + """ + excluded_kwargs = [ + "sastoken_ttl"] + _validate_kwargs(exclude=excluded_kwargs, **kwargs) + config_kwargs = _get_config_kwargs(**kwargs) + pipeline_configuration = (pipeline.IoTHubPipelineConfig)(device_id=device_id, + hostname=hostname, x509=x509, **config_kwargs) + pipeline_configuration.blob_upload = True + http_pipeline = pipeline.HTTPPipeline(pipeline_configuration) + mqtt_pipeline = pipeline.MQTTPipeline(pipeline_configuration) + return cls(mqtt_pipeline, http_pipeline) + + @classmethod + def create_from_symmetric_key(cls, symmetric_key, hostname, device_id, **kwargs): + """ + Instantiate a client using symmetric key authentication. + + :param symmetric_key: The symmetric key. + :param str hostname: Host running the IotHub. + Can be found in the Azure portal in the Overview tab as the string hostname. + :param device_id: The device ID + + :param str server_verification_cert: Configuration Option. The trusted certificate chain. 
+ Necessary when using connecting to an endpoint which has a non-standard root of trust, + such as a protocol gateway. + :param bool websockets: Configuration Option. Default is False. Set to true if using MQTT + over websockets. + :param cipher: Configuration Option. Cipher suite(s) for TLS/SSL, as a string in + "OpenSSL cipher list format" or as a list of cipher suite strings. + :type cipher: str or list(str) + :param str product_info: Configuration Option. Default is empty string. The string contains + arbitrary product info which is appended to the user agent string. + :param proxy_options: Options for sending traffic through proxy servers. + :type proxy_options: :class:`azure.iot.device.ProxyOptions` + :param int sastoken_ttl: The time to live (in seconds) for the created SasToken used for + authentication. Default is 3600 seconds (1 hour) + :param int keep_alive: Maximum period in seconds between communications with the + broker. If no other messages are being exchanged, this controls the + rate at which the client will send ping messages to the broker. + If not provided default value of 60 secs will be used. + + :raises: TypeError if given an unsupported parameter. + :raises: ValueError if the provided parameters are invalid. + + :return: An instance of an IoTHub client that uses a symmetric key for authentication. 
+ """ + _validate_kwargs(**kwargs) + uri = _form_sas_uri(hostname=hostname, device_id=device_id) + signing_mechanism = auth.SymmetricKeySigningMechanism(key=symmetric_key) + token_ttl = kwargs.get("sastoken_ttl", 3600) + try: + sastoken = st.RenewableSasToken(uri, signing_mechanism, ttl=token_ttl) + except st.SasTokenError as e: + try: + new_err = ValueError("Could not create a SasToken using provided values") + new_err.__cause__ = e + raise new_err + finally: + e = None + del e + + config_kwargs = _get_config_kwargs(**kwargs) + pipeline_configuration = (pipeline.IoTHubPipelineConfig)(device_id=device_id, + hostname=hostname, sastoken=sastoken, **config_kwargs) + pipeline_configuration.blob_upload = True + http_pipeline = pipeline.HTTPPipeline(pipeline_configuration) + mqtt_pipeline = pipeline.MQTTPipeline(pipeline_configuration) + return cls(mqtt_pipeline, http_pipeline) + + @abc.abstractmethod + def receive_message(self): + pass + + @abc.abstractmethod + def get_storage_info_for_blob(self, blob_name): + pass + + @abc.abstractmethod + def notify_blob_upload_status(self, correlation_id, is_success, status_code, status_description): + pass + + +@six.add_metaclass(abc.ABCMeta) +class AbstractIoTHubModuleClient(AbstractIoTHubClient): + + @classmethod + def create_from_edge_environment(cls, **kwargs): + """ + Instantiate the client from the IoT Edge environment. + + This method can only be run from inside an IoT Edge container, or in a debugging + environment configured for Edge development (e.g. Visual Studio, Visual Studio Code) + + :param bool websockets: Configuration Option. Default is False. Set to true if using MQTT + over websockets. + :param cipher: Configuration Option. Cipher suite(s) for TLS/SSL, as a string in + "OpenSSL cipher list format" or as a list of cipher suite strings. + :type cipher: str or list(str) + :param str product_info: Configuration Option. Default is empty string. 
The string contains + arbitrary product info which is appended to the user agent string. + :param proxy_options: Options for sending traffic through proxy servers. + :type proxy_options: :class:`azure.iot.device.ProxyOptions` + :param int sastoken_ttl: The time to live (in seconds) for the created SasToken used for + authentication. Default is 3600 seconds (1 hour) + :param int keep_alive: Maximum period in seconds between communications with the + broker. If no other messages are being exchanged, this controls the + rate at which the client will send ping messages to the broker. + If not provided default value of 60 secs will be used. + + :raises: OSError if the IoT Edge container is not configured correctly. + :raises: ValueError if debug variables are invalid. + :raises: TypeError if given an unsupported parameter. + + :returns: An instance of an IoTHub client that uses the IoT Edge environment for + authentication. + """ + excluded_kwargs = [ + "server_verification_cert"] + _validate_kwargs(exclude=excluded_kwargs, **kwargs) + try: + hostname = os.environ["IOTEDGE_IOTHUBHOSTNAME"] + device_id = os.environ["IOTEDGE_DEVICEID"] + module_id = os.environ["IOTEDGE_MODULEID"] + gateway_hostname = os.environ["IOTEDGE_GATEWAYHOSTNAME"] + module_generation_id = os.environ["IOTEDGE_MODULEGENERATIONID"] + workload_uri = os.environ["IOTEDGE_WORKLOADURI"] + api_version = os.environ["IOTEDGE_APIVERSION"] + except KeyError: + try: + connection_string = os.environ["EdgeHubConnectionString"] + ca_cert_filepath = os.environ["EdgeModuleCACertificateFile"] + except KeyError as e: + try: + new_err = OSError("IoT Edge environment not configured correctly") + new_err.__cause__ = e + raise new_err + finally: + e = None + del e + + try: + with io.open(ca_cert_filepath, mode="r") as ca_cert_file: + server_verification_cert = ca_cert_file.read() + except (OSError, IOError) as e: + try: + new_err = ValueError("Invalid CA certificate file") + new_err.__cause__ = e + raise new_err + finally: 
+ e = None + del e + + connection_string = cs.ConnectionString(connection_string) + try: + device_id = connection_string[cs.DEVICE_ID] + module_id = connection_string[cs.MODULE_ID] + hostname = connection_string[cs.HOST_NAME] + gateway_hostname = connection_string[cs.GATEWAY_HOST_NAME] + except KeyError: + raise ValueError("Invalid Connection String") + + signing_mechanism = auth.SymmetricKeySigningMechanism(key=(connection_string[cs.SHARED_ACCESS_KEY])) + else: + hsm = edge_hsm.IoTEdgeHsm(module_id=module_id, + generation_id=module_generation_id, + workload_uri=workload_uri, + api_version=api_version) + try: + server_verification_cert = hsm.get_certificate() + except edge_hsm.IoTEdgeError as e: + try: + new_err = OSError("Unexpected failure in IoTEdge") + new_err.__cause__ = e + raise new_err + finally: + e = None + del e + + signing_mechanism = hsm + uri = _form_sas_uri(hostname=hostname, device_id=device_id, module_id=module_id) + token_ttl = kwargs.get("sastoken_ttl", 3600) + try: + sastoken = st.RenewableSasToken(uri, signing_mechanism, ttl=token_ttl) + except st.SasTokenError as e: + try: + new_err = ValueError("Could not create a SasToken using the values provided, or in the Edge environment") + new_err.__cause__ = e + raise new_err + finally: + e = None + del e + + config_kwargs = _get_config_kwargs(**kwargs) + pipeline_configuration = (pipeline.IoTHubPipelineConfig)(device_id=device_id, + module_id=module_id, hostname=hostname, gateway_hostname=gateway_hostname, sastoken=sastoken, server_verification_cert=server_verification_cert, **config_kwargs) + pipeline_configuration.method_invoke = True + http_pipeline = pipeline.HTTPPipeline(pipeline_configuration) + mqtt_pipeline = pipeline.MQTTPipeline(pipeline_configuration) + return cls(mqtt_pipeline, http_pipeline) + + @classmethod + def create_from_x509_certificate(cls, x509, hostname, device_id, module_id, **kwargs): + """ + Instantiate a client using X509 certificate authentication. + + :param str hostname: Host running the IotHub. + Can be found in the Azure portal in the Overview tab as the string hostname. + :param x509: The complete x509 certificate object.
+ To use the certificate the enrollment object needs to contain cert + (either the root certificate or one of the intermediate CA certificates). + If the cert comes from a CER file, it needs to be base64 encoded. + :type x509: :class:`azure.iot.device.X509` + :param str device_id: The ID used to uniquely identify a device in the IoTHub + :param str module_id: The ID used to uniquely identify a module on a device on the IoTHub. + + :param str server_verification_cert: Configuration Option. The trusted certificate chain. + Necessary when using connecting to an endpoint which has a non-standard root of trust, + such as a protocol gateway. + :param bool websockets: Configuration Option. Default is False. Set to true if using MQTT + over websockets. + :param cipher: Configuration Option. Cipher suite(s) for TLS/SSL, as a string in + "OpenSSL cipher list format" or as a list of cipher suite strings. + :type cipher: str or list(str) + :param str product_info: Configuration Option. Default is empty string. The string contains + arbitrary product info which is appended to the user agent string. + :param proxy_options: Options for sending traffic through proxy servers. + :type proxy_options: :class:`azure.iot.device.ProxyOptions` + :param int keep_alive: Maximum period in seconds between communications with the + broker. If no other messages are being exchanged, this controls the + rate at which the client will send ping messages to the broker. + If not provided default value of 60 secs will be used. + + :raises: TypeError if given an unsupported parameter. + + :returns: An instance of an IoTHub client that uses an X509 certificate for authentication. 
+ """ + excluded_kwargs = [ + "sastoken_ttl"] + _validate_kwargs(exclude=excluded_kwargs, **kwargs) + config_kwargs = _get_config_kwargs(**kwargs) + pipeline_configuration = (pipeline.IoTHubPipelineConfig)(**, **config_kwargs) + http_pipeline = pipeline.HTTPPipeline(pipeline_configuration) + mqtt_pipeline = pipeline.MQTTPipeline(pipeline_configuration) + return cls(mqtt_pipeline, http_pipeline) + + @abc.abstractmethod + def send_message_to_output(self, message, output_name): + pass + + @abc.abstractmethod + def receive_message_on_input(self, input_name): + pass + + @abc.abstractmethod + def invoke_method(self, method_params, device_id, module_id=None): + pass \ No newline at end of file diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/aio/__init__.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/aio/__init__.py new file mode 100644 index 0000000..c79ce22 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/aio/__init__.py @@ -0,0 +1,15 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/aio/__init__.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 280 bytes +"""Azure IoT Hub Device SDK - Asynchronous + +This SDK provides asynchronous functionality for communicating with the Azure IoT Hub +as a Device or Module. 
+""" +from .async_clients import IoTHubDeviceClient, IoTHubModuleClient +__all__ = [ + "IoTHubDeviceClient", "IoTHubModuleClient"] diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/aio/async_clients.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/aio/async_clients.py new file mode 100644 index 0000000..02c6fa6 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/aio/async_clients.py @@ -0,0 +1,583 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/aio/async_clients.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 32314 bytes +"""This module contains user-facing asynchronous clients for the +Azure IoTHub Device SDK for Python. +""" +import logging, asyncio, deprecation +from azure.iot.device.common import async_adapter +from azure.iot.device.iothub.abstract_clients import AbstractIoTHubClient, AbstractIoTHubDeviceClient, AbstractIoTHubModuleClient +from azure.iot.device.iothub.models import Message +from azure.iot.device.iothub.pipeline import constant +from azure.iot.device.iothub.pipeline import exceptions as pipeline_exceptions +from azure.iot.device import exceptions +from azure.iot.device.iothub.inbox_manager import InboxManager +from .async_inbox import AsyncClientInbox +from . 
import async_handler_manager, loop_management +from azure.iot.device import constant as device_constant +logger = logging.getLogger(__name__) + +async def handle_result(callback): + try: + return await callback.completion() + except pipeline_exceptions.ConnectionDroppedError as e: + try: + raise exceptions.ConnectionDroppedError(message="Lost connection to IoTHub", cause=e) + finally: + e = None + del e + + except pipeline_exceptions.ConnectionFailedError as e: + try: + raise exceptions.ConnectionFailedError(message="Could not connect to IoTHub", cause=e) + finally: + e = None + del e + + except pipeline_exceptions.UnauthorizedError as e: + try: + raise exceptions.CredentialError(message="Credentials invalid, could not connect", cause=e) + finally: + e = None + del e + + except pipeline_exceptions.ProtocolClientError as e: + try: + raise exceptions.ClientError(message="Error in the IoTHub client", cause=e) + finally: + e = None + del e + + except pipeline_exceptions.TlsExchangeAuthError as e: + try: + raise exceptions.ClientError(message="Error in the IoTHub client due to TLS exchanges.", + cause=e) + finally: + e = None + del e + + except pipeline_exceptions.ProtocolProxyError as e: + try: + raise exceptions.ClientError(message="Error in the IoTHub client raised due to proxy connections.", + cause=e) + finally: + e = None + del e + + except Exception as e: + try: + raise exceptions.ClientError(message="Unexpected failure", cause=e) + finally: + e = None + del e + + +class GenericIoTHubClient(AbstractIoTHubClient): + __doc__ = "A super class representing a generic asynchronous client.\n This class needs to be extended for specific clients.\n " + + def __init__(self, **kwargs): + """Initializer for a generic asynchronous client. + + This initializer should not be called directly. 
+ Instead, use one of the 'create_from_' classmethods to instantiate + + :param mqtt_pipeline: The MQTTPipeline used for the client + :type mqtt_pipeline: :class:`azure.iot.device.iothub.pipeline.MQTTPipeline` + :param http_pipeline: The HTTPPipeline used for the client + :type http_pipeline: :class:`azure.iot.device.iothub.pipeline.HTTPPipeline` + """ + (super().__init__)(**kwargs) + self._inbox_manager = InboxManager(inbox_type=AsyncClientInbox) + self._handler_manager = async_handler_manager.AsyncHandlerManager(self._inbox_manager) + self._mqtt_pipeline.on_connected = self._on_connected + self._mqtt_pipeline.on_disconnected = self._on_disconnected + self._mqtt_pipeline.on_method_request_received = self._inbox_manager.route_method_request + self._mqtt_pipeline.on_twin_patch_received = self._inbox_manager.route_twin_patch + + async def _enable_feature(self, feature_name): + """Enable an Azure IoT Hub feature + + :param feature_name: The name of the feature to enable. + See azure.iot.device.common.pipeline.constant for possible values. + """ + logger.info("Enabling feature:" + feature_name + "...") + if not self._mqtt_pipeline.feature_enabled[feature_name]: + enable_feature_async = async_adapter.emulate_async(self._mqtt_pipeline.enable_feature) + callback = async_adapter.AwaitableCallback() + await enable_feature_async(feature_name, callback=callback) + await handle_result(callback) + logger.info("Successfully enabled feature:" + feature_name) + else: + logger.info("Feature ({}) already enabled - skipping".format(feature_name)) + + async def _disable_feature(self, feature_name): + """Disable an Azure IoT Hub feature + + :param feature_name: The name of the feature to enable. + See azure.iot.device.common.pipeline.constant for possible values. 
+ """ + logger.info("Disabling feature: {}...".format(feature_name)) + if self._mqtt_pipeline.feature_enabled[feature_name]: + disable_feature_async = async_adapter.emulate_async(self._mqtt_pipeline.disable_feature) + callback = async_adapter.AwaitableCallback() + await disable_feature_async(feature_name, callback=callback) + await handle_result(callback) + logger.info("Successfully disabled feature: {}".format(feature_name)) + else: + logger.info("Feature ({}) already disabled - skipping".format(feature_name)) + + async def connect(self): + """Connects the client to an Azure IoT Hub or Azure IoT Edge Hub instance. + + The destination is chosen based on the credentials passed via the auth_provider parameter + that was provided when this object was initialized. + + :raises: :class:`azure.iot.device.exceptions.CredentialError` if credentials are invalid + and a connection cannot be established. + :raises: :class:`azure.iot.device.exceptions.ConnectionFailedError` if a establishing a + connection results in failure. + :raises: :class:`azure.iot.device.exceptions.ConnectionDroppedError` if connection is lost + during execution. + :raises: :class:`azure.iot.device.exceptions.ClientError` if there is an unexpected failure + during execution. + """ + logger.info("Connecting to Hub...") + connect_async = async_adapter.emulate_async(self._mqtt_pipeline.connect) + callback = async_adapter.AwaitableCallback() + await connect_async(callback=callback) + await handle_result(callback) + logger.info("Successfully connected to Hub") + + async def disconnect(self): + """Disconnect the client from the Azure IoT Hub or Azure IoT Edge Hub instance. + + It is recommended that you make sure to call this coroutine when you are completely done + with the your client instance. + + :raises: :class:`azure.iot.device.exceptions.ClientError` if there is an unexpected failure + during execution. 
+ """ + logger.info("Disconnecting from Hub...") + logger.debug("Executing initial disconnect") + disconnect_async = async_adapter.emulate_async(self._mqtt_pipeline.disconnect) + callback = async_adapter.AwaitableCallback() + await disconnect_async(callback=callback) + await handle_result(callback) + logger.debug("Successfully executed initial disconnect") + logger.debug("Stopping handlers...") + self._handler_manager.stop() + logger.debug("Successfully stopped handlers") + logger.debug("Executing secondary disconnect...") + disconnect_async = async_adapter.emulate_async(self._mqtt_pipeline.disconnect) + callback = async_adapter.AwaitableCallback() + await disconnect_async(callback=callback) + await handle_result(callback) + logger.debug("Successfully executed secondary disconnect") + logger.info("Successfully disconnected from Hub") + + async def update_sastoken(self, sastoken): + """ + Update the client's SAS Token used for authentication, then reauthorizes the connection. + + This API can only be used if the client was initially created with a SAS Token. + Note also that this API may return before the reauthorization/reconnection is completed. + This means that some errors that may occur as part of the reconnection could occur in the + background, and will not be raised by this method. + + :param str sastoken: The new SAS Token string for the client to use + + :raises: :class:`azure.iot.device.exceptions.ClientError` if the client was not initially + created with a SAS token. + :raises: :class:`azure.iot.device.exceptions.ClientError` if there is an unexpected failure + during execution. 
+ :raises: ValueError if the sastoken parameter is invalid + """ + self._replace_user_supplied_sastoken(sastoken) + logger.info("Reauthorizing connection with Hub...") + reauth_connection_async = async_adapter.emulate_async(self._mqtt_pipeline.reauthorize_connection) + callback = async_adapter.AwaitableCallback() + await reauth_connection_async(callback=callback) + await handle_result(callback) + logger.info("Successfully reauthorized connection to Hub") + + async def send_message(self, message): + """Sends a message to the default events endpoint on the Azure IoT Hub or Azure IoT Edge Hub instance. + + If the connection to the service has not previously been opened by a call to connect, this + function will open the connection before sending the event. + + :param message: The actual message to send. Anything passed that is not an instance of the + Message class will be converted to Message object. + :type message: :class:`azure.iot.device.Message` or str + + :raises: :class:`azure.iot.device.exceptions.CredentialError` if credentials are invalid + and a connection cannot be established. + :raises: :class:`azure.iot.device.exceptions.ConnectionFailedError` if a establishing a + connection results in failure. + :raises: :class:`azure.iot.device.exceptions.ConnectionDroppedError` if connection is lost + during execution. + :raises: :class:`azure.iot.device.exceptions.ClientError` if there is an unexpected failure + during execution. + :raises: ValueError if the message fails size validation. 
+ """ + if not isinstance(message, Message): + message = Message(message) + if message.get_size() > device_constant.TELEMETRY_MESSAGE_SIZE_LIMIT: + raise ValueError("Size of telemetry message can not exceed 256 KB.") + logger.info("Sending message to Hub...") + send_message_async = async_adapter.emulate_async(self._mqtt_pipeline.send_message) + callback = async_adapter.AwaitableCallback() + await send_message_async(message, callback=callback) + await handle_result(callback) + logger.info("Successfully sent message to Hub") + + @deprecation.deprecated(deprecated_in="2.3.0", + current_version=(device_constant.VERSION), + details="We recommend that you use the .on_method_request_received property to set a handler instead") + async def receive_method_request(self, method_name=None): + """Receive a method request via the Azure IoT Hub or Azure IoT Edge Hub. + + If no method request is yet available, will wait until it is available. + + :param str method_name: Optionally provide the name of the method to receive requests for. + If this parameter is not given, all methods not already being specifically targeted by + a different call to receive_method will be received. + + :returns: MethodRequest object representing the received method request. + :rtype: :class:`azure.iot.device.MethodRequest` + """ + self._check_receive_mode_is_api() + if not self._mqtt_pipeline.feature_enabled[constant.METHODS]: + await self._enable_feature(constant.METHODS) + method_inbox = self._inbox_manager.get_method_request_inbox(method_name) + logger.info("Waiting for method request...") + method_request = await method_inbox.get() + logger.info("Received method request") + return method_request + + async def send_method_response(self, method_response): + """Send a response to a method request via the Azure IoT Hub or Azure IoT Edge Hub. + + If the connection to the service has not previously been opened by a call to connect, this + function will open the connection before sending the event. 
+ + :param method_response: The MethodResponse to send + :type method_response: :class:`azure.iot.device.MethodResponse` + + :raises: :class:`azure.iot.device.exceptions.CredentialError` if credentials are invalid + and a connection cannot be established. + :raises: :class:`azure.iot.device.exceptions.ConnectionFailedError` if a establishing a + connection results in failure. + :raises: :class:`azure.iot.device.exceptions.ConnectionDroppedError` if connection is lost + during execution. + :raises: :class:`azure.iot.device.exceptions.ClientError` if there is an unexpected failure + during execution. + """ + logger.info("Sending method response to Hub...") + send_method_response_async = async_adapter.emulate_async(self._mqtt_pipeline.send_method_response) + callback = async_adapter.AwaitableCallback() + await send_method_response_async(method_response, callback=callback) + await handle_result(callback) + logger.info("Successfully sent method response to Hub") + + async def get_twin(self): + """ + Gets the device or module twin from the Azure IoT Hub or Azure IoT Edge Hub service. + + :returns: Complete Twin as a JSON dict + :rtype: dict + + :raises: :class:`azure.iot.device.exceptions.CredentialError` if credentials are invalid + and a connection cannot be established. + :raises: :class:`azure.iot.device.exceptions.ConnectionFailedError` if a establishing a + connection results in failure. + :raises: :class:`azure.iot.device.exceptions.ConnectionDroppedError` if connection is lost + during execution. + :raises: :class:`azure.iot.device.exceptions.ClientError` if there is an unexpected failure + during execution. 
+ """ + logger.info("Getting twin") + if not self._mqtt_pipeline.feature_enabled[constant.TWIN]: + await self._enable_feature(constant.TWIN) + get_twin_async = async_adapter.emulate_async(self._mqtt_pipeline.get_twin) + callback = async_adapter.AwaitableCallback(return_arg_name="twin") + await get_twin_async(callback=callback) + twin = await handle_result(callback) + logger.info("Successfully retrieved twin") + return twin + + async def patch_twin_reported_properties(self, reported_properties_patch): + """ + Update reported properties with the Azure IoT Hub or Azure IoT Edge Hub service. + + If the service returns an error on the patch operation, this function will raise the + appropriate error. + + :param reported_properties_patch: Twin Reported Properties patch as a JSON dict + :type reported_properties_patch: dict + + :raises: :class:`azure.iot.device.exceptions.CredentialError` if credentials are invalid + and a connection cannot be established. + :raises: :class:`azure.iot.device.exceptions.ConnectionFailedError` if a establishing a + connection results in failure. + :raises: :class:`azure.iot.device.exceptions.ConnectionDroppedError` if connection is lost + during execution. + :raises: :class:`azure.iot.device.exceptions.ClientError` if there is an unexpected failure + during execution. 
+ """ + logger.info("Patching twin reported properties") + if not self._mqtt_pipeline.feature_enabled[constant.TWIN]: + await self._enable_feature(constant.TWIN) + patch_twin_async = async_adapter.emulate_async(self._mqtt_pipeline.patch_twin_reported_properties) + callback = async_adapter.AwaitableCallback() + await patch_twin_async(patch=reported_properties_patch, callback=callback) + await handle_result(callback) + logger.info("Successfully sent twin patch") + + @deprecation.deprecated(deprecated_in="2.3.0", + current_version=(device_constant.VERSION), + details="We recommend that you use the .on_twin_desired_properties_patch_received property to set a handler instead") + async def receive_twin_desired_properties_patch(self): + """ + Receive a desired property patch via the Azure IoT Hub or Azure IoT Edge Hub. + + If no method request is yet available, will wait until it is available. + + :returns: Twin Desired Properties patch as a JSON dict + :rtype: dict + """ + self._check_receive_mode_is_api() + if not self._mqtt_pipeline.feature_enabled[constant.TWIN_PATCHES]: + await self._enable_feature(constant.TWIN_PATCHES) + twin_patch_inbox = self._inbox_manager.get_twin_patch_inbox() + logger.info("Waiting for twin patches...") + patch = await twin_patch_inbox.get() + logger.info("twin patch received") + return patch + + def _generic_handler_setter(self, handler_name, feature_name, new_handler): + self._check_receive_mode_is_handler() + setattr(self._handler_manager, handler_name, new_handler) + if new_handler is not None: + loop = self._mqtt_pipeline.feature_enabled[feature_name] or loop_management.get_client_internal_loop() + fut = asyncio.run_coroutine_threadsafe((self._enable_feature(feature_name)), loop=loop) + fut.result() + else: + if new_handler is None: + if self._mqtt_pipeline.feature_enabled[feature_name]: + loop = loop_management.get_client_internal_loop() + fut = asyncio.run_coroutine_threadsafe((self._disable_feature(feature_name)), loop=loop) + 
fut.result() + + @property + def on_twin_desired_properties_patch_received(self): + """The handler function or coroutine that will be called when a twin desired properties + patch is received. + + The function or coroutine definition should take one positional argument (the twin patch + in the form of a JSON dictionary object)""" + return self._handler_manager.on_twin_desired_properties_patch_received + + @on_twin_desired_properties_patch_received.setter + def on_twin_desired_properties_patch_received(self, value): + self._generic_handler_setter("on_twin_desired_properties_patch_received", constant.TWIN_PATCHES, value) + + @property + def on_method_request_received(self): + """The handler function or coroutine that will be called when a method request is received. + + The function or coroutine definition should take one positional argument (the + :class:`azure.iot.device.MethodRequest` object)""" + return self._handler_manager.on_method_request_received + + @on_method_request_received.setter + def on_method_request_received(self, value): + self._generic_handler_setter("on_method_request_received", constant.METHODS, value) + + +class IoTHubDeviceClient(GenericIoTHubClient, AbstractIoTHubDeviceClient): + __doc__ = "An asynchronous device client that connects to an Azure IoT Hub instance.\n\n Intended for usage with Python 3.5.3+\n " + + def __init__(self, mqtt_pipeline, http_pipeline): + """Initializer for a IoTHubDeviceClient. + + This initializer should not be called directly. + Instead, use one of the 'create_from_' classmethods to instantiate + + :param mqtt_pipeline: The pipeline used to connect to the IoTHub endpoint. 
+ :type mqtt_pipeline: :class:`azure.iot.device.iothub.pipeline.MQTTPipeline` + """ + super().__init__(mqtt_pipeline=mqtt_pipeline, http_pipeline=http_pipeline) + self._mqtt_pipeline.on_c2d_message_received = self._inbox_manager.route_c2d_message + + @deprecation.deprecated(deprecated_in="2.3.0", + current_version=(device_constant.VERSION), + details="We recommend that you use the .on_message_received property to set a handler instead") + async def receive_message(self): + """Receive a message that has been sent from the Azure IoT Hub. + + If no message is yet available, will wait until an item is available. + + :returns: Message that was sent from the Azure IoT Hub. + :rtype: :class:`azure.iot.device.Message` + """ + self._check_receive_mode_is_api() + if not self._mqtt_pipeline.feature_enabled[constant.C2D_MSG]: + await self._enable_feature(constant.C2D_MSG) + c2d_inbox = self._inbox_manager.get_c2d_message_inbox() + logger.info("Waiting for message from Hub...") + message = await c2d_inbox.get() + logger.info("Message received") + return message + + async def get_storage_info_for_blob(self, blob_name): + """Sends a POST request over HTTP to an IoTHub endpoint that will return information for uploading via the Azure Storage Account linked to the IoTHub your device is connected to. + + :param str blob_name: The name in string format of the blob that will be uploaded using the storage API. This name will be used to generate the proper credentials for Storage, and needs to match what will be used with the Azure Storage SDK to perform the blob upload. + + :returns: A JSON-like (dictionary) object from IoT Hub that will contain relevant information including: correlationId, hostName, containerName, blobName, sasToken. 
+ """ + get_storage_info_for_blob_async = async_adapter.emulate_async(self._http_pipeline.get_storage_info_for_blob) + callback = async_adapter.AwaitableCallback(return_arg_name="storage_info") + await get_storage_info_for_blob_async(blob_name=blob_name, callback=callback) + storage_info = await handle_result(callback) + logger.info("Successfully retrieved storage_info") + return storage_info + + async def notify_blob_upload_status(self, correlation_id, is_success, status_code, status_description): + """When the upload is complete, the device sends a POST request to the IoT Hub endpoint with information on the status of an upload to blob attempt. This is used by IoT Hub to notify listening clients. + + :param str correlation_id: Provided by IoT Hub on get_storage_info_for_blob request. + :param bool is_success: A boolean that indicates whether the file was uploaded successfully. + :param int status_code: A numeric status code that is the status for the upload of the fiel to storage. + :param str status_description: A description that corresponds to the status_code. + """ + notify_blob_upload_status_async = async_adapter.emulate_async(self._http_pipeline.notify_blob_upload_status) + callback = async_adapter.AwaitableCallback() + await notify_blob_upload_status_async(correlation_id=correlation_id, + is_success=is_success, + status_code=status_code, + status_description=status_description, + callback=callback) + await handle_result(callback) + logger.info("Successfully notified blob upload status") + + @property + def on_message_received(self): + """The handler function or coroutine that will be called when a message is received. 
+ + The function or coroutine definition should take one positional argument (the + :class:`azure.iot.device.Message` object)""" + return self._handler_manager.on_message_received + + @on_message_received.setter + def on_message_received(self, value): + self._generic_handler_setter("on_message_received", constant.C2D_MSG, value) + + +class IoTHubModuleClient(GenericIoTHubClient, AbstractIoTHubModuleClient): + __doc__ = "An asynchronous module client that connects to an Azure IoT Hub or Azure IoT Edge instance.\n\n Intended for usage with Python 3.5.3+\n " + + def __init__(self, mqtt_pipeline, http_pipeline): + """Initializer for an IoTHubModuleClient. + + This initializer should not be called directly. + Instead, use one of the 'create_from_' classmethods to instantiate + + :param mqtt_pipeline: The pipeline used to connect to the IoTHub endpoint. + :type mqtt_pipeline: :class:`azure.iot.device.iothub.pipeline.MQTTPipeline` + """ + super().__init__(mqtt_pipeline=mqtt_pipeline, http_pipeline=http_pipeline) + self._mqtt_pipeline.on_input_message_received = self._inbox_manager.route_input_message + + async def send_message_to_output(self, message, output_name): + """Sends an event/message to the given module output. + + These are outgoing events and are meant to be "output events" + + If the connection to the service has not previously been opened by a call to connect, this + function will open the connection before sending the event. + + :param message: Message to send to the given output. Anything passed that is not an + instance of the Message class will be converted to Message object. + :type message: :class:`azure.iot.device.Message` or str + :param str output_name: Name of the output to send the event to. + + :raises: :class:`azure.iot.device.exceptions.CredentialError` if credentials are invalid + and a connection cannot be established. + :raises: :class:`azure.iot.device.exceptions.ConnectionFailedError` if establishing a + connection results in failure. 
+ :raises: :class:`azure.iot.device.exceptions.ConnectionDroppedError` if connection is lost + during execution. + :raises: :class:`azure.iot.device.exceptions.ClientError` if there is an unexpected failure + during execution. + :raises: ValueError if the message fails size validation. + """ + if not isinstance(message, Message): + message = Message(message) + if message.get_size() > device_constant.TELEMETRY_MESSAGE_SIZE_LIMIT: + raise ValueError("Size of message can not exceed 256 KB.") + message.output_name = output_name + logger.info("Sending message to output:" + output_name + "...") + send_output_message_async = async_adapter.emulate_async(self._mqtt_pipeline.send_output_message) + callback = async_adapter.AwaitableCallback() + await send_output_message_async(message, callback=callback) + await handle_result(callback) + logger.info("Successfully sent message to output: " + output_name) + + @deprecation.deprecated(deprecated_in="2.3.0", + current_version=(device_constant.VERSION), + details="We recommend that you use the .on_message_received property to set a handler instead") + async def receive_message_on_input(self, input_name): + """Receive an input message that has been sent from another Module to a specific input. + + If no message is yet available, will wait until an item is available. + + :param str input_name: The input name to receive a message on. + + :returns: Message that was sent to the specified input. 
+ :rtype: :class:`azure.iot.device.Message` + """ + self._check_receive_mode_is_api() + if not self._mqtt_pipeline.feature_enabled[constant.INPUT_MSG]: + await self._enable_feature(constant.INPUT_MSG) + inbox = self._inbox_manager.get_input_message_inbox(input_name) + logger.info("Waiting for input message on: " + input_name + "...") + message = await inbox.get() + logger.info("Input message received on: " + input_name) + return message + + async def invoke_method(self, method_params, device_id, module_id=None): + """Invoke a method from your client onto a device or module client, and receive the response to the method call. + + :param dict method_params: Should contain a methodName (str), payload (str), + connectTimeoutInSeconds (int), responseTimeoutInSeconds (int). + :param str device_id: Device ID of the target device where the method will be invoked. + :param str module_id: Module ID of the target module where the method will be invoked. (Optional) + + :returns: method_result should contain a status, and a payload + :rtype: dict + """ + logger.info("Invoking {} method on {}{}".format(method_params["methodName"], device_id, module_id)) + invoke_method_async = async_adapter.emulate_async(self._http_pipeline.invoke_method) + callback = async_adapter.AwaitableCallback(return_arg_name="invoke_method_response") + await invoke_method_async(device_id, method_params, callback=callback, module_id=module_id) + method_response = await handle_result(callback) + logger.info("Successfully invoked method") + return method_response + + @property + def on_message_received(self): + """The handler function or coroutine that will be called when an input message is received. 
+ + The function definition or coroutine should take one positional argument (the + :class:`azure.iot.device.Message` object)""" + return self._handler_manager.on_message_received + + @on_message_received.setter + def on_message_received(self, value): + self._generic_handler_setter("on_message_received", constant.INPUT_MSG, value) diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/aio/async_handler_manager.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/aio/async_handler_manager.py new file mode 100644 index 0000000..00e6278 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/aio/async_handler_manager.py @@ -0,0 +1,115 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/aio/async_handler_manager.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 8569 bytes +""" This module contains the manager for handler methods used by the aio clients""" +import asyncio, logging, inspect, threading, concurrent.futures +from azure.iot.device.common import asyncio_compat, handle_exceptions +from azure.iot.device.iothub.sync_handler_manager import AbstractHandlerManager, HandlerManagerException, HandlerRunnerKillerSentinel +from . 
import loop_management +logger = logging.getLogger(__name__) + +class AsyncHandlerManager(AbstractHandlerManager): + __doc__ = "Handler manager for use with asynchronous clients" + + async def _inbox_handler_runner(self, inbox, handler_name): + """Run infinite loop that waits for an inbox to receive an object from it, then calls + the handler with that object + """ + logger.debug("HANDLER RUNNER ({}): Starting runner".format(handler_name)) + + def _handler_callback(future): + try: + e = future.exception(timeout=0) + except Exception as raised_e: + try: + new_err = HandlerManagerException(message=("HANDLER ({}): Unable to retrieve exception data from incomplete invocation".format(handler_name)), + cause=raised_e) + handle_exceptions.handle_background_exception(new_err) + finally: + raised_e = None + del raised_e + + else: + if e: + new_err = HandlerManagerException(message=("HANDLER ({}): Error during invocation".format(handler_name)), + cause=e) + handle_exceptions.handle_background_exception(new_err) + else: + logger.debug("HANDLER ({}): Successfully completed invocation".format(handler_name)) + + tpe = concurrent.futures.ThreadPoolExecutor(max_workers=4) + while True: + handler_arg = await inbox.get() + if isinstance(handler_arg, HandlerRunnerKillerSentinel): + logger.debug("HANDLER RUNNER ({}): HandlerRunnerKillerSentinel found in inbox. 
Exiting.".format(handler_name)) + tpe.shutdown() + break + handler = getattr(self, handler_name) + logger.debug("HANDLER RUNNER ({}): Invoking handler".format(handler_name)) + if inspect.iscoroutinefunction(handler): + handler_loop = loop_management.get_client_handler_loop() + fut = asyncio.run_coroutine_threadsafe(handler(handler_arg), handler_loop) + fut.add_done_callback(_handler_callback) + else: + fut = tpe.submit(handler, handler_arg) + fut.add_done_callback(_handler_callback) + + async def _event_handler_runner(self, handler_name): + logger.error("._event_handler_runner() not yet implemented") + + def _start_handler_runner(self, handler_name): + """Create, and store a task for running a handler + """ + if self._handler_runners[handler_name] is not None: + raise HandlerManagerException("Cannot create task for handler runner: {}. Task already exists".format(handler_name)) + else: + inbox = self._get_inbox_for_handler(handler_name) + if inbox: + coro = self._inbox_handler_runner(inbox, handler_name) + else: + coro = self._event_handler_runner(handler_name) + runner_loop = loop_management.get_client_handler_runner_loop() + future = asyncio.run_coroutine_threadsafe(coro, runner_loop) + + def _handler_runner_callback(completed_future): + try: + e = completed_future.exception(timeout=0) + except Exception as raised_e: + try: + new_err = HandlerManagerException(message=("HANDLER RUNNER ({}): Unable to retrieve exception data from incomplete task".format(handler_name)), + cause=raised_e) + handle_exceptions.handle_background_exception(new_err) + finally: + raised_e = None + del raised_e + + else: + if e: + new_err = HandlerManagerException(message=("HANDLER RUNNER ({}): Unexpected error during task".format(handler_name)), + cause=e) + handle_exceptions.handle_background_exception(new_err) + self._handler_runners[handler_name] = None + self._start_handler_runner(handler_name) + else: + logger.debug("HANDLER RUNNER ({}): Task successfully completed without 
exception".format(handler_name)) + + future.add_done_callback(_handler_runner_callback) + self._handler_runners[handler_name] = future + logger.debug("Future for Handler Runner ({}) was stored".format(handler_name)) + + def _stop_handler_runner(self, handler_name): + """Stop and remove a handler runner task. + All pending items in the corresponding inbox will be handled by the handler before stoppage. + """ + logger.debug("Adding HandlerRunnerKillerSentinel to inbox corresponding to {} handler runner".format(handler_name)) + inbox = self._get_inbox_for_handler(handler_name) + inbox._put(HandlerRunnerKillerSentinel()) + logger.debug("Waiting for {} handler runner to exit...".format(handler_name)) + future = self._handler_runners[handler_name] + future.result() + self._handler_runners[handler_name] = None + logger.debug("Handler runner for {} has been stopped".format(handler_name)) diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/aio/async_inbox.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/aio/async_inbox.py new file mode 100644 index 0000000..a90e921 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/aio/async_inbox.py @@ -0,0 +1,69 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/aio/async_inbox.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 3526 bytes +"""This module contains an Inbox class for use with an asynchronous client""" +import asyncio, threading, janus +from azure.iot.device.iothub.sync_inbox import AbstractInbox +from . 
import loop_management + +class AsyncClientInbox(AbstractInbox): + __doc__ = "Holds generic incoming data for an asynchronous client.\n\n All methods implemented in this class are threadsafe.\n " + + def __init__(self): + """Initializer for AsyncClientInbox.""" + + async def make_queue(): + return janus.Queue() + + loop = loop_management.get_client_internal_loop() + fut = asyncio.run_coroutine_threadsafe(make_queue(), loop) + self._queue = fut.result() + + def __contains__(self, item): + """Return True if item is in Inbox, False otherwise""" + with self._queue._sync_mutex: + return item in self._queue._queue + + def _put(self, item): + """Put an item into the Inbox. + + Block if necessary until a free slot is available. + Only to be used by the InboxManager. + + :param item: The item to be put in the Inbox. + """ + self._queue.sync_q.put(item) + + async def get(self): + """Remove and return an item from the Inbox. + + If Inbox is empty, wait until an item is available. + + :returns: An item from the Inbox. + """ + loop = loop_management.get_client_internal_loop() + fut = asyncio.run_coroutine_threadsafe(self._queue.async_q.get(), loop) + return await asyncio.wrap_future(fut) + + def empty(self): + """Returns True if the inbox is empty, False otherwise + + Note that there is a race condition here, and this may not be accurate. This is because + the .empty() operation on a janus queue is not threadsafe. + + :returns: Boolean indicating if the inbox is empty + """ + return self._queue.async_q.empty() + + def clear(self): + """Remove all items from the inbox. 
+ """ + while True: + try: + self._queue.sync_q.get_nowait() + except janus.SyncQueueEmpty: + break diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/aio/loop_management.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/aio/loop_management.py new file mode 100644 index 0000000..4fa7f49 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/aio/loop_management.py @@ -0,0 +1,57 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/aio/loop_management.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 2400 bytes +""" This module contains functions of managing event loops for the IoTHub client +""" +import asyncio, threading, logging +from azure.iot.device.common import asyncio_compat +logger = logging.getLogger(__name__) +loops = {'CLIENT_HANDLER_LOOP':None, + 'CLIENT_INTERNAL_LOOP':None, + 'CLIENT_HANDLER_RUNNER_LOOP':None} + +def _cleanup(): + """Clear all running loops and end respective threads. + ONLY FOR TESTING USAGE + By using this function, you can wipe all global loops. 
+ DO NOT USE THIS IN PRODUCTION CODE + """ + for loop_name, loop in loops.items(): + if loop is not None: + logger.debug("Stopping event loop - {}".format(loop_name)) + loop.call_soon_threadsafe(loop.stop) + loops[loop_name] = None + + +def _make_new_loop(loop_name): + logger.debug("Creating new event loop - {}".format(loop_name)) + new_loop = asyncio.new_event_loop() + loop_thread = threading.Thread(target=(new_loop.run_forever)) + loop_thread.daemon = True + loop_thread.start() + loops[loop_name] = new_loop + + +def get_client_internal_loop(): + """Return the loop for internal client operations""" + if loops["CLIENT_INTERNAL_LOOP"] is None: + _make_new_loop("CLIENT_INTERNAL_LOOP") + return loops["CLIENT_INTERNAL_LOOP"] + + +def get_client_handler_runner_loop(): + """Return the loop for handler runners""" + if loops["CLIENT_HANDLER_RUNNER_LOOP"] is None: + _make_new_loop("CLIENT_HANDLER_RUNNER_LOOP") + return loops["CLIENT_HANDLER_RUNNER_LOOP"] + + +def get_client_handler_loop(): + """Return the loop for invoking user-provided handlers on the client""" + if loops["CLIENT_HANDLER_LOOP"] is None: + _make_new_loop("CLIENT_HANDLER_LOOP") + return loops["CLIENT_HANDLER_LOOP"] diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/edge_hsm.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/edge_hsm.py new file mode 100644 index 0000000..77fbe1d --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/edge_hsm.py @@ -0,0 +1,164 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/edge_hsm.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 6242 bytes +import logging, json, base64, requests, requests_unixsocket +from six.moves import urllib, http_client +from azure.iot.device.common.chainable_exception import ChainableException +from 
azure.iot.device.common.auth.signing_mechanism import SigningMechanism +from azure.iot.device import user_agent +requests_unixsocket.monkeypatch() +logger = logging.getLogger(__name__) + +class IoTEdgeError(ChainableException): + pass + + +class IoTEdgeHsm(SigningMechanism): + __doc__ = "\n Constructor for instantiating a iot hsm object. This is an object that\n communicates with the Azure IoT Edge HSM in order to get connection credentials\n for an Azure IoT Edge module. The credentials that this object return come in\n two forms:\n\n 1. The trust bundle, which is a certificate that can be used as a trusted cert\n to authenticate the SSL connection between the IoE Edge module and IoT Edge\n 2. A signing function, which can be used to create the sig field for a\n SharedAccessSignature string which can be used to authenticate with Iot Edge\n " + + def __init__(self, module_id, generation_id, workload_uri, api_version): + """ + Constructor for instantiating a Azure IoT Edge HSM object + + :param str module_id: The module id + :param str api_version: The API version + :param str generation_id: The module generation id + :param str workload_uri: The workload uri + """ + self.module_id = urllib.parse.quote(module_id, safe="") + self.api_version = api_version + self.generation_id = generation_id + self.workload_uri = _format_socket_uri(workload_uri) + + def get_certificate(self): + """ + Return the server verification certificate from the trust bundle that can be used to + validate the server-side SSL TLS connection that we use to talk to Edge + + :return: The server verification certificate to use for connections to the Azure IoT Edge + instance, as a PEM certificate in string form. + + :raises: IoTEdgeError if unable to retrieve the certificate. 
+ """ + r = requests.get((self.workload_uri + "trust-bundle"), + params={"api-version": (self.api_version)}, + headers={"User-Agent": (urllib.parse.quote_plus(user_agent.get_iothub_user_agent()))}) + try: + r.raise_for_status() + except requests.exceptions.HTTPError as e: + try: + raise IoTEdgeError(message="Unable to get trust bundle from Edge", cause=e) + finally: + e = None + del e + + try: + bundle = r.json() + except ValueError as e: + try: + raise IoTEdgeError(message="Unable to decode trust bundle", cause=e) + finally: + e = None + del e + + try: + cert = bundle["certificate"] + except KeyError as e: + try: + raise IoTEdgeError(message="No certificate in trust bundle", cause=e) + finally: + e = None + del e + + return cert + + def sign(self, data_str): + """ + Use the IoTEdge HSM to sign a piece of string data. The caller should then insert the + returned value (the signature) into the 'sig' field of a SharedAccessSignature string. + + :param str data_str: The data string to sign + + :return: The signature, as a URI-encoded and base64-encoded value that is ready to + directly insert into the SharedAccessSignature string. + + :raises: IoTEdgeError if unable to sign the data. 
+ """ + encoded_data_str = base64.b64encode(data_str.encode("utf-8")).decode() + path = "{workload_uri}modules/{module_id}/genid/{gen_id}/sign".format(workload_uri=(self.workload_uri), + module_id=(self.module_id), + gen_id=(self.generation_id)) + sign_request = {'keyId':"primary", + 'algo':"HMACSHA256", 'data':encoded_data_str} + r = requests.post(url=path, + params={"api-version": (self.api_version)}, + headers={"User-Agent": (urllib.parse.quote((user_agent.get_iothub_user_agent()), safe=""))}, + data=(json.dumps(sign_request))) + try: + r.raise_for_status() + except requests.exceptions.HTTPError as e: + try: + raise IoTEdgeError(message="Unable to sign data", cause=e) + finally: + e = None + del e + + try: + sign_response = r.json() + except ValueError as e: + try: + raise IoTEdgeError(message="Unable to decode signed data", cause=e) + finally: + e = None + del e + + try: + signed_data_str = sign_response["digest"] + except KeyError as e: + try: + raise IoTEdgeError(message="No signed data received", cause=e) + finally: + e = None + del e + + return signed_data_str + + +def _format_socket_uri(old_uri): + """ + This function takes a socket URI in one form and converts it into another form. + + The source form is based on what we receive inside the IOTEDGE_WORKLOADURI + environment variable, and it looks like this: + "unix:///var/run/iotedge/workload.sock" + + The destination form is based on what the requests_unixsocket library expects + and it looks like this: + "http+unix://%2Fvar%2Frun%2Fiotedge%2Fworkload.sock/" + + The function changes the prefix, uri-encodes the path, and adds a slash + at the end. + + If the socket URI does not start with unix:// this function only adds + a slash at the end. 
+ + :param old_uri: The URI in IOTEDGE_WORKLOADURI form + + :return: The URI in requests_unixsocket form + """ + old_prefix = "unix://" + new_prefix = "http+unix://" + if old_uri.startswith(old_prefix): + stripped_uri = old_uri[len(old_prefix)[:None]] + if stripped_uri.endswith("/"): + stripped_uri = stripped_uri[None[:-1]] + new_uri = new_prefix + urllib.parse.quote(stripped_uri, safe="") + else: + new_uri = old_uri + if not new_uri.endswith("/"): + new_uri += "/" + return new_uri diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/inbox_manager.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/inbox_manager.py new file mode 100644 index 0000000..21bfb6e --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/inbox_manager.py @@ -0,0 +1,163 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/inbox_manager.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 7035 bytes +"""This module contains a manager for inboxes.""" +import logging +logger = logging.getLogger(__name__) + +class InboxManager(object): + __doc__ = "Manages the various Inboxes for a client.\n\n :ivar c2d_message_inbox: The C2D message Inbox.\n :ivar input_message_inboxes: A dictionary mapping input names to input message Inboxes.\n :ivar generic_method_request_inbox: The generic method request Inbox.\n :ivar named_method_request_inboxes: A dictionary mapping method names to method request Inboxes.\n " + + def __init__(self, inbox_type): + """Initializer for the InboxManager. + + :param inbox_type: An Inbox class that the manager will use to create Inboxes. 
+ """ + self._create_inbox = inbox_type + self.unified_message_inbox = self._create_inbox() + self.generic_method_request_inbox = self._create_inbox() + self.twin_patch_inbox = self._create_inbox() + self.c2d_message_inbox = self._create_inbox() + self.input_message_inboxes = {} + self.named_method_request_inboxes = {} + self.use_unified_msg_mode = False + + def get_unified_message_inbox(self): + """Retrieve the Inbox for all messages (C2D and Input) + """ + return self.unified_message_inbox + + def get_input_message_inbox(self, input_name): + """Retrieve the input message Inbox for a given input. + + If the Inbox does not already exist, it will be created. + + :param str input_name: The name of the input for which the associated Inbox is desired. + :returns: An Inbox for input messages on the selected input. + """ + try: + inbox = self.input_message_inboxes[input_name] + except KeyError: + inbox = self._create_inbox() + self.input_message_inboxes[input_name] = inbox + + return inbox + + def get_c2d_message_inbox(self): + """Retrieve the Inbox for C2D messages. + + :returns: An Inbox for C2D messages. + """ + return self.c2d_message_inbox + + def get_method_request_inbox(self, method_name=None): + """Retrieve the method request Inbox for a given method name if provided, + or for generic method requests if not. + + If the Inbox does not already exist, it will be created. + + :param str method_name: Optional. The name of the method for which the + associated Inbox is desired. + :returns: An Inbox for method requests. 
+ """ + if method_name: + try: + inbox = self.named_method_request_inboxes[method_name] + except KeyError: + inbox = self._create_inbox() + self.named_method_request_inboxes[method_name] = inbox + + else: + inbox = self.generic_method_request_inbox + return inbox + + def get_twin_patch_inbox(self): + """Retrieve the Inbox for twin patches that arrive from the service + + :returns: An Inbox for twin patches + """ + return self.twin_patch_inbox + + def clear_all_method_requests(self): + """Delete all method requests currently in inboxes. + """ + self.generic_method_request_inbox.clear() + for inbox in self.named_method_request_inboxes.values(): + inbox.clear() + + def route_input_message(self, incoming_message): + """Route an incoming input message + + In unified message mode, route to the unified message inbox + + In standard mode, route to the corresponding input message Inbox. If the input + is unknown, the message will be dropped. + + :param incoming_message: The message to be routed. + + :returns: Boolean indicating if message was successfuly routed or not. + """ + input_name = incoming_message.input_name + if self.use_unified_msg_mode: + self.unified_message_inbox._put(incoming_message) + return True + try: + inbox = self.input_message_inboxes[input_name] + except KeyError: + logger.warning("No input message inbox for {} - dropping message".format(input_name)) + return False + else: + inbox._put(incoming_message) + logger.debug("Input message sent to {} inbox".format(input_name)) + return True + + def route_c2d_message(self, incoming_message): + """Route an incoming C2D message + + In unified message mode, route to the unified message inbox. + + In standard mode, route to to the C2D message Inbox. + + :param incoming_message: The message to be routed. + + :returns: Boolean indicating if message was successfully routed or not. 
+ """ + if self.use_unified_msg_mode: + self.unified_message_inbox._put(incoming_message) + return True + self.c2d_message_inbox._put(incoming_message) + logger.debug("C2D message sent to inbox") + return True + + def route_method_request(self, incoming_method_request): + """Route an incoming method request to the correct method request Inbox. + + If the method name is recognized, it will be routed to a method-specific Inbox. + Otherwise, it will be routed to the generic method request Inbox. + + :param incoming_method_request: The method request to be routed. + + :returns: Boolean indicating if the method request was successfully routed or not. + """ + try: + inbox = self.named_method_request_inboxes[incoming_method_request.name] + except KeyError: + inbox = self.generic_method_request_inbox + + inbox._put(incoming_method_request) + return True + + def route_twin_patch(self, incoming_patch): + """Route an incoming twin patch to the twin patch Inbox. + + :param incoming_patch: The patch to be routed. + + :returns: Boolean indicating if patch was successfully routed or not. + """ + self.twin_patch_inbox._put(incoming_patch) + logger.debug("twin patch message sent to inbox") + return True diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/models/__init__.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/models/__init__.py new file mode 100644 index 0000000..85e2e4e --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/models/__init__.py @@ -0,0 +1,13 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/models/__init__.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 202 bytes +"""Azure IoT Hub Device SDK Models + +This package provides object models for use within the Azure IoT Hub Device SDK. 
+""" +from .message import Message +from .methods import MethodRequest, MethodResponse diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/models/message.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/models/message.py new file mode 100644 index 0000000..dd0b9d0 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/models/message.py @@ -0,0 +1,58 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/models/message.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 3686 bytes +"""This module contains a class representing messages that are sent or received. +""" +from azure.iot.device import constant +import sys + +class Message(object): + __doc__ = "Represents a message to or from IoTHub\n\n :ivar data: The data that constitutes the payload\n :ivar custom_properties: Dictionary of custom message properties. The keys and values of these properties will always be string.\n :ivar message id: A user-settable identifier for the message used for request-reply patterns. Format: A case-sensitive string (up to 128 characters long) of ASCII 7-bit alphanumeric characters + {'-', ':', '.', '+', '%', '_', '#', '*', '?', '!', '(', ')', ',', '=', '@', ';', '$', '''}\n :ivar expiry_time_utc: Date and time of message expiration in UTC format\n :ivar correlation_id: A property in a response message that typically contains the message_id of the request, in request-reply patterns\n :ivar user_id: An ID to specify the origin of messages\n :ivar content_encoding: Content encoding of the message data. Can be 'utf-8', 'utf-16' or 'utf-32'\n :ivar content_type: Content type property used to route messages with the message-body. 
Can be 'application/json'\n :ivar output_name: Name of the output that the message is being sent to.\n :ivar input_name: Name of the input that the message was received on.\n " + + def __init__(self, data, message_id=None, content_encoding=None, content_type=None, output_name=None): + """ + Initializer for Message + + :param data: The data that constitutes the payload + :param str message_id: A user-settable identifier for the message used for request-reply patterns. Format: A case-sensitive string (up to 128 characters long) of ASCII 7-bit alphanumeric characters + {'-', ':', '.', '+', '%', '_', '#', '*', '?', '!', '(', ')', ',', '=', '@', ';', '$', '''} + :param str content_encoding: Content encoding of the message data. Other values can be utf-16' or 'utf-32' + :param str content_type: Content type property used to routes with the message body. + :param str output_name: Name of the output that the is being sent to. + """ + self.data = data + self.custom_properties = {} + self.message_id = message_id + self.expiry_time_utc = None + self.correlation_id = None + self.user_id = None + self.content_encoding = content_encoding + self.content_type = content_type + self.output_name = output_name + self.input_name = None + self._iothub_interface_id = None + + @property + def iothub_interface_id(self): + return self._iothub_interface_id + + def set_as_security_message(self): + """ + Set the message as a security message. + + This is a provisional API. Functionality not yet guaranteed. 
+ """ + self._iothub_interface_id = constant.SECURITY_MESSAGE_INTERFACE_ID + + def __str__(self): + return str(self.data) + + def get_size(self): + total = 0 + total = total + sum((sys.getsizeof(v) for v in self.__dict__.values() if v is not None if v is not self.custom_properties)) + if self.custom_properties: + total = total + sum((sys.getsizeof(v) for v in self.custom_properties.values() if v is not None)) + return total diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/models/methods.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/models/methods.py new file mode 100644 index 0000000..aa5d35a --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/models/methods.py @@ -0,0 +1,62 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/models/methods.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 2728 bytes +"""This module contains classes related to direct method invocations. +""" + +class MethodRequest(object): + __doc__ = "Represents a request to invoke a direct method.\n\n :ivar str request_id: The request id.\n :ivar str name: The name of the method to be invoked.\n :ivar dict payload: The JSON payload being sent with the request.\n " + + def __init__(self, request_id, name, payload): + """Initializer for a MethodRequest. + + :param str request_id: The request id. + :param str name: The name of the method to be invoked + :param dict payload: The JSON payload being sent with the request. 
+ """ + self._request_id = request_id + self._name = name + self._payload = payload + + @property + def request_id(self): + return self._request_id + + @property + def name(self): + return self._name + + @property + def payload(self): + return self._payload + + +class MethodResponse(object): + __doc__ = "Represents a response to a direct method.\n\n :ivar str request_id: The request id of the MethodRequest being responded to.\n :ivar int status: The status of the execution of the MethodRequest.\n :ivar payload: The JSON payload to be sent with the response.\n :type payload: dict, str, int, float, bool, or None (JSON compatible values)\n " + + def __init__(self, request_id, status, payload=None): + """Initializer for MethodResponse. + + :param str request_id: The request id of the MethodRequest being responded to. + :param int status: The status of the execution of the MethodRequest. + :param payload: The JSON payload to be sent with the response. (OPTIONAL) + :type payload: dict, str, int, float, bool, or None (JSON compatible values) + """ + self.request_id = request_id + self.status = status + self.payload = payload + + @classmethod + def create_from_method_request(cls, method_request, status, payload=None): + """Factory method for creating a MethodResponse from a MethodRequest. + + :param method_request: The MethodRequest object to respond to. + :type method_request: MethodRequest. + :param int status: The status of the execution of the MethodRequest. 
+ :type payload: dict, str, int, float, bool, or None (JSON compatible values) + """ + return cls(request_id=(method_request.request_id), status=status, payload=payload) diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/models/twin.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/models/twin.py new file mode 100644 index 0000000..fb2d6bb --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/models/twin.py @@ -0,0 +1,18 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/models/twin.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 1329 bytes +"""This module contains classes related to device twin and module twin functionality +""" + +class Twin(object): + __doc__ = "Represents a device twin or module twin\n\n :ivar desired_properties: The desired properties for the Twin. These are properties\n which are sent _to_ the device or module to indicate the _desired_ state of the device\n or module\n :type desired_properties: dict, str, int, float, bool, or None (JSON compatible values)\n :ivar reported_properties: The reported properties for the Twin. 
These are properties\n which are sent _from_ the device or module to indicate the _actual_ state of the device.\n :type reported_properties: dict, str, int, float, bool, or None (JSON compatible values)\n " + + def __init__(self): + """Initializer for a Twin object + """ + self.desiried_properties = None + self.reported_properties = None diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/__init__.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/__init__.py new file mode 100644 index 0000000..7971eff --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/__init__.py @@ -0,0 +1,16 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/pipeline/__init__.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 270 bytes +"""Azure IoT Hub Device SDK Pipeline + +This package provides a protocol pipeline for use with the Azure IoT Hub Device SDK. 
+ +INTERNAL USAGE ONLY +""" +from .mqtt_pipeline import MQTTPipeline +from .http_pipeline import HTTPPipeline +from .config import IoTHubPipelineConfig diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/config.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/config.py new file mode 100644 index 0000000..a4086ce --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/config.py @@ -0,0 +1,30 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/pipeline/config.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 1962 bytes +import logging +from azure.iot.device.common.pipeline.config import BasePipelineConfig +logger = logging.getLogger(__name__) + +class IoTHubPipelineConfig(BasePipelineConfig): + __doc__ = "A class for storing all configurations/options for IoTHub clients in the Azure IoT Python Device Client Library.\n " + + def __init__(self, hostname, device_id, module_id=None, product_info='', **kwargs): + """Initializer for IoTHubPipelineConfig which passes all unrecognized keyword-args down to BasePipelineConfig + to be evaluated. This stacked options setting is to allow for unique configuration options to exist between the + multiple clients, while maintaining a base configuration class with shared config options. + + :param str hostname: The hostname of the IoTHub to connect to + :param str device_id: The device identity being used with the IoTHub + :param str module_id: The module identity being used with the IoTHub + :param str product_info: A custom identification string for the type of device connecting to Azure IoT Hub. 
+ """ + (super(IoTHubPipelineConfig, self).__init__)(hostname=hostname, **kwargs) + self.device_id = device_id + self.module_id = module_id + self.product_info = product_info + self.blob_upload = False + self.method_invoke = False diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/constant.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/constant.py new file mode 100644 index 0000000..d205651 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/constant.py @@ -0,0 +1,14 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/pipeline/constant.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 498 bytes +"""This module contains constants realted to the pipeline package. +""" +C2D_MSG = "c2d" +INPUT_MSG = "input" +METHODS = "methods" +TWIN = "twin" +TWIN_PATCHES = "twin_patches" diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/exceptions.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/exceptions.py new file mode 100644 index 0000000..7c120b7 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/exceptions.py @@ -0,0 +1,10 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/pipeline/exceptions.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 1125 bytes +"""This module defines an exception surface, exposed as part of the pipeline API""" +from azure.iot.device.common.pipeline.pipeline_exceptions import * +from azure.iot.device.common.transport_exceptions import ConnectionFailedError, ConnectionDroppedError, UnauthorizedError, 
ProtocolClientError, TlsExchangeAuthError, ProtocolProxyError diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/http_map_error.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/http_map_error.py new file mode 100644 index 0000000..dff88cb --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/http_map_error.py @@ -0,0 +1,61 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/pipeline/http_map_error.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 3301 bytes + + +def translate_error(sc, reason): + """ + Codes_SRS_NODE_IOTHUB_REST_API_CLIENT_16_012: [Any error object returned by translate_error shall inherit from the generic Error Javascript object and have 3 properties: + - response shall contain the IncomingMessage object returned by the HTTP layer. + - reponseBody shall contain the content of the HTTP response. + - message shall contain a human-readable error message.] 
+ """ + message = "Error: {}".format(reason) + if sc == 400: + error = "ArgumentError({})".format(message) + else: + if sc == 401: + error = "UnauthorizedError({})".format(message) + else: + if sc == 403: + error = "TooManyDevicesError({})".format(message) + else: + if sc == 404: + if reason == "Device Not Found": + error = "DeviceNotFoundError({})".format(message) + else: + if reason == "IoTHub Not Found": + error = "IotHubNotFoundError({})".format(message) + else: + error = "Error('Not found')" + else: + if sc == 408: + error = "DeviceTimeoutError({})".format(message) + else: + if sc == 409: + error = "DeviceAlreadyExistsError({})".format(message) + else: + if sc == 412: + error = "InvalidEtagError({})".format(message) + else: + if sc == 429: + error = "ThrottlingError({})".format(message) + else: + if sc == 500: + error = "InternalServerError({})".format(message) + else: + if sc == 502: + error = "BadDeviceResponseError({})".format(message) + else: + if sc == 503: + error = "ServiceUnavailableError({})".format(message) + else: + if sc == 504: + error = "GatewayTimeoutError({})".format(message) + else: + error = "Error({})".format(message) + return error diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/http_path_iothub.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/http_path_iothub.py new file mode 100644 index 0000000..9cfc41f --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/http_path_iothub.py @@ -0,0 +1,40 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/pipeline/http_path_iothub.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 1833 bytes +import logging +import six.moves.urllib as urllib +logger = logging.getLogger(__name__) + +def get_method_invoke_path(device_id, 
def get_method_invoke_path(device_id, module_id=None):
    """
    :return: The path for invoking methods from one module to a device or module. It is of the format
        twins/uri_encode($device_id)/modules/uri_encode($module_id)/methods
    """
    path = "twins/{}".format(urllib.parse.quote_plus(device_id))
    if module_id:
        path += "/modules/{}".format(urllib.parse.quote_plus(module_id))
    return path + "/methods"


def get_storage_info_for_blob_path(device_id):
    """
    This does not take a module_id since get_storage_info_for_blob_path should only ever be invoked on device clients.

    :return: The path for getting the storage sdk credential information from IoT Hub. It is of the format
        devices/uri_encode($device_id)/files
    """
    return "devices/" + urllib.parse.quote_plus(device_id) + "/files"


def get_notify_blob_upload_status_path(device_id):
    """
    This does not take a module_id since get_notify_blob_upload_status_path should only ever be invoked on device clients.

    :return: The path for notifying IoT Hub of blob upload status. It is of the format
        devices/uri_encode($device_id)/files/notifications
    """
    return "devices/" + urllib.parse.quote_plus(device_id) + "/files/notifications"
class HTTPPipeline(object):
    """Pipeline to communicate with Edge.
    Uses HTTP.
    """

    def __init__(self, pipeline_configuration):
        """
        Constructor for instantiating a pipeline adapter object.

        :param pipeline_configuration: The configuration generated based on user inputs
        """
        self._pipeline = (
            pipeline_stages_base.PipelineRootStage(pipeline_configuration)
            .append_stage(pipeline_stages_iothub_http.IoTHubHTTPTranslationStage())
            .append_stage(pipeline_stages_http.HTTPTransportStage())
        )
        # Block until pipeline initialization has finished.
        init_complete = EventedCallback()
        self._pipeline.run_op(pipeline_ops_base.InitializePipelineOperation(callback=init_complete))
        init_complete.wait_for_completion()

    def invoke_method(self, device_id, method_params, callback, module_id=None):
        """
        Send a request to the service to invoke a method on a target device or module.

        :param device_id: The target device id
        :param method_params: The method parameters to be invoked on the target client
        :param callback: callback which is called when request has been fulfilled.
            On success, this callback is called with error=None.
            On failure, this callback is called with error set to the cause of the failure.
        :param module_id: The target module id

        The following exceptions are not "raised", but rather returned via the "error" parameter
        when invoking "callback":

        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ProtocolClientError`
        """
        logger.debug("HTTPPipeline invoke_method called")
        if not self._pipeline.pipeline_configuration.method_invoke:
            # Method invoke is only valid for module clients created from an Edge environment.
            return callback(
                error=pipeline_exceptions.PipelineError(
                    "invoke_method called, but it is only supported on module clients generated from an edge environment. If you are not using a module generated from an edge environment, you cannot use invoke_method"
                )
            )

        def _complete(op, error):
            callback(error=error, invoke_method_response=op.method_response)

        self._pipeline.run_op(
            pipeline_ops_iothub_http.MethodInvokeOperation(
                target_device_id=device_id,
                target_module_id=module_id,
                method_params=method_params,
                callback=_complete,
            )
        )

    def get_storage_info_for_blob(self, blob_name, callback):
        """
        Sends a POST request to the IoT Hub service endpoint to retrieve an object that contains
        information for uploading via the Storage SDK.

        :param blob_name: The name of the blob that will be uploaded via the Azure Storage SDK.
        :param callback: callback which is called when request has been fulfilled.
            On success, this callback is called with error=None and storage_info set to the
            information JSON received from the service.
            On failure, this callback is called with error set to the cause of the failure and
            storage_info=None.

        The following exceptions are not "raised", but rather returned via the "error" parameter
        when invoking "callback":

        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ProtocolClientError`
        """
        logger.debug("HTTPPipeline get_storage_info_for_blob called")
        if not self._pipeline.pipeline_configuration.blob_upload:
            # Blob upload is only valid for device clients.
            return callback(
                error=pipeline_exceptions.PipelineError(
                    "get_storage_info_for_blob called, but it is only supported for use with device clients. Ensure you are using a device client."
                )
            )

        def _complete(op, error):
            callback(error=error, storage_info=op.storage_info)

        self._pipeline.run_op(
            pipeline_ops_iothub_http.GetStorageInfoOperation(
                blob_name=blob_name, callback=_complete
            )
        )

    def notify_blob_upload_status(self, correlation_id, is_success, status_code, status_description, callback):
        """
        Sends a POST request to an IoT Hub service endpoint to notify the status of the Storage SDK
        call for a blob upload.

        :param str correlation_id: Provided by IoT Hub on get_storage_info_for_blob request.
        :param bool is_success: A boolean that indicates whether the file was uploaded successfully.
        :param int status_code: A numeric status code that is the status for the upload of the file to storage.
        :param str status_description: A description that corresponds to the status_code.
        :param callback: callback which is called when request has been fulfilled.
            On success, this callback is called with error=None.
            On failure, this callback is called with error set to the cause of the failure.

        The following exceptions are not "raised", but rather returned via the "error" parameter
        when invoking "callback":

        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ProtocolClientError`
        """
        logger.debug("HTTPPipeline notify_blob_upload_status called")
        if not self._pipeline.pipeline_configuration.blob_upload:
            # Blob upload is only valid for device clients.
            return callback(
                error=pipeline_exceptions.PipelineError(
                    "notify_blob_upload_status called, but it is only supported for use with device clients. Ensure you are using a device client."
                )
            )

        def _complete(op, error):
            callback(error=error)

        self._pipeline.run_op(
            pipeline_ops_iothub_http.NotifyBlobUploadStatusOperation(
                correlation_id=correlation_id,
                is_success=is_success,
                status_code=status_code,
                status_description=status_description,
                callback=_complete,
            )
        )
class MQTTPipeline(object):
    """Pipeline used to communicate with IoTHub/EdgeHub over MQTT."""

    def __init__(self, pipeline_configuration):
        """
        Constructor for instantiating a pipeline adapter object.

        :param pipeline_configuration: The configuration generated based on user inputs
        """
        # Tracks which features have been enabled via enable_feature().
        self.feature_enabled = {
            constant.C2D_MSG: False,
            constant.INPUT_MSG: False,
            constant.METHODS: False,
            constant.TWIN: False,
            constant.TWIN_PATCHES: False,
        }
        # Handler hooks; the owning client assigns these after construction.
        self.on_connected = None
        self.on_disconnected = None
        self.on_c2d_message_received = None
        self.on_input_message_received = None
        self.on_method_request_received = None
        self.on_twin_patch_received = None
        # NOTE: stage order is significant — operations flow top-down and
        # events flow bottom-up through this chain.
        self._pipeline = (
            pipeline_stages_base.PipelineRootStage(pipeline_configuration)
            .append_stage(pipeline_stages_base.SasTokenRenewalStage())
            .append_stage(pipeline_stages_iothub.EnsureDesiredPropertiesStage())
            .append_stage(pipeline_stages_iothub.TwinRequestResponseStage())
            .append_stage(pipeline_stages_base.CoordinateRequestAndResponseStage())
            .append_stage(pipeline_stages_iothub_mqtt.IoTHubMQTTTranslationStage())
            .append_stage(pipeline_stages_base.AutoConnectStage())
            .append_stage(pipeline_stages_base.ReconnectStage())
            .append_stage(pipeline_stages_base.ConnectionLockStage())
            .append_stage(pipeline_stages_base.RetryStage())
            .append_stage(pipeline_stages_base.OpTimeoutStage())
            .append_stage(pipeline_stages_mqtt.MQTTTransportStage())
        )

        def _dispatch_pipeline_event(event):
            # Route an incoming pipeline event to the matching handler, or log
            # an error if no handler has been attached yet.
            if isinstance(event, pipeline_events_iothub.C2DMessageEvent):
                if self.on_c2d_message_received:
                    self.on_c2d_message_received(event.message)
                else:
                    logger.error("C2D message event received with no handler. dropping.")
            elif isinstance(event, pipeline_events_iothub.InputMessageEvent):
                if self.on_input_message_received:
                    self.on_input_message_received(event.message)
                else:
                    logger.error("input message event received with no handler. dropping.")
            elif isinstance(event, pipeline_events_iothub.MethodRequestEvent):
                if self.on_method_request_received:
                    self.on_method_request_received(event.method_request)
                else:
                    logger.error("Method request event received with no handler. Dropping.")
            elif isinstance(event, pipeline_events_iothub.TwinDesiredPropertiesPatchEvent):
                if self.on_twin_patch_received:
                    self.on_twin_patch_received(event.patch)
                else:
                    logger.error("Twin patch event received with no handler. Dropping.")
            else:
                logger.error("Dropping unknown pipeline event {}".format(event.name))

        def _handle_connected():
            if self.on_connected:
                self.on_connected()

        def _handle_disconnected():
            if self.on_disconnected:
                self.on_disconnected()

        self._pipeline.on_pipeline_event_handler = _dispatch_pipeline_event
        self._pipeline.on_connected_handler = _handle_connected
        self._pipeline.on_disconnected_handler = _handle_disconnected

        # Block until pipeline initialization has finished.
        init_complete = EventedCallback()
        self._pipeline.run_op(pipeline_ops_base.InitializePipelineOperation(callback=init_complete))
        init_complete.wait_for_completion()

    def connect(self, callback):
        """
        Connect to the service.

        :param callback: callback which is called when the connection to the service is complete.

        The following exceptions are not "raised", but rather returned via the "error" parameter
        when invoking "callback":

        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionFailedError`
        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionDroppedError`
        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.UnauthorizedError`
        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ProtocolClientError`
        """
        logger.debug("Starting ConnectOperation on the pipeline")

        def _complete(op, error):
            callback(error=error)

        self._pipeline.run_op(pipeline_ops_base.ConnectOperation(callback=_complete))

    def disconnect(self, callback):
        """
        Disconnect from the service.

        :param callback: callback which is called when the connection to the service has been disconnected

        The following exceptions are not "raised", but rather returned via the "error" parameter
        when invoking "callback":

        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ProtocolClientError`
        """
        logger.debug("Starting DisconnectOperation on the pipeline")

        def _complete(op, error):
            callback(error=error)

        self._pipeline.run_op(pipeline_ops_base.DisconnectOperation(callback=_complete))

    def reauthorize_connection(self, callback):
        """
        Reauthorize connection to the service.

        Technically, this function will return upon disconnection. The disconnection will then
        immediately trigger a reconnect, but this function will not wait for that to return.
        This is (unfortunately) necessary while supporting MQTT3.

        :param callback: callback which is called when the connection to the service has been disconnected

        The following exceptions are not "raised", but rather returned via the "error" parameter
        when invoking "callback":

        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ProtocolClientError`
        """
        logger.debug("Starting ReauthorizeConnectionOperation on the pipeline")

        def _complete(op, error):
            callback(error=error)

        self._pipeline.run_op(pipeline_ops_base.ReauthorizeConnectionOperation(callback=_complete))

    def send_message(self, message, callback):
        """
        Send a telemetry message to the service.

        :param message: message to send.
        :param callback: callback which is called when the message publish has been acknowledged by the service.

        The following exceptions are not "raised", but rather returned via the "error" parameter
        when invoking "callback":

        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionFailedError`
        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionDroppedError`
        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.UnauthorizedError`
        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ProtocolClientError`
        """

        def _complete(op, error):
            callback(error=error)

        self._pipeline.run_op(
            pipeline_ops_iothub.SendD2CMessageOperation(message=message, callback=_complete)
        )

    def send_output_message(self, message, callback):
        """
        Send an output message to the service.

        :param message: message to send.
        :param callback: callback which is called when the message publish has been acknowledged by the service.

        The following exceptions are not "raised", but rather returned via the "error" parameter
        when invoking "callback":

        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionFailedError`
        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionDroppedError`
        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.UnauthorizedError`
        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ProtocolClientError`
        """

        def _complete(op, error):
            callback(error=error)

        self._pipeline.run_op(
            pipeline_ops_iothub.SendOutputMessageOperation(message=message, callback=_complete)
        )

    def send_method_response(self, method_response, callback):
        """
        Send a method response to the service.

        :param method_response: the method response to send
        :param callback: callback which is called when response has been acknowledged by the service

        The following exceptions are not "raised", but rather returned via the "error" parameter
        when invoking "callback":

        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionFailedError`
        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionDroppedError`
        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.UnauthorizedError`
        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ProtocolClientError`
        """
        logger.debug("MQTTPipeline send_method_response called")

        def _complete(op, error):
            callback(error=error)

        self._pipeline.run_op(
            pipeline_ops_iothub.SendMethodResponseOperation(
                method_response=method_response, callback=_complete
            )
        )

    def get_twin(self, callback):
        """
        Send a request for a full twin to the service.

        :param callback: callback which is called when request has been acknowledged by the service.
            This callback should have two parameters. On success, this callback is called with the
            requested twin and error=None. On failure, this callback is called with None for the
            requested twin and error set to the cause of the failure.

        The following exceptions are not "raised", but rather returned via the "error" parameter
        when invoking "callback":

        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionFailedError`
        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionDroppedError`
        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.UnauthorizedError`
        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ProtocolClientError`
        """

        def _complete(op, error):
            if error:
                callback(error=error, twin=None)
            else:
                callback(twin=op.twin)

        self._pipeline.run_op(pipeline_ops_iothub.GetTwinOperation(callback=_complete))

    def patch_twin_reported_properties(self, patch, callback):
        """
        Send a patch for a twin's reported properties to the service.

        :param patch: the reported properties patch to send
        :param callback: callback which is called when request has been acknowledged by the service.

        The following exceptions are not "raised", but rather returned via the "error" parameter
        when invoking "callback":

        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionFailedError`
        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ConnectionDroppedError`
        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.UnauthorizedError`
        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ProtocolClientError`
        """

        def _complete(op, error):
            callback(error=error)

        self._pipeline.run_op(
            pipeline_ops_iothub.PatchTwinReportedPropertiesOperation(patch=patch, callback=_complete)
        )

    def enable_feature(self, feature_name, callback):
        """
        Enable the given feature by subscribing to the appropriate topics.

        :param feature_name: one of the feature name constants from constant.py
        :param callback: callback which is called when the feature is enabled

        :raises: ValueError if feature_name is invalid
        """
        logger.debug("enable_feature {} called".format(feature_name))
        if feature_name not in self.feature_enabled:
            raise ValueError("Invalid feature_name")

        def _complete(op, error):
            # Only mark the feature enabled once the subscribe succeeds.
            if error:
                logger.error("Subscribe for {} failed. Not enabling feature".format(feature_name))
            else:
                self.feature_enabled[feature_name] = True
            callback(error=error)

        self._pipeline.run_op(
            pipeline_ops_base.EnableFeatureOperation(feature_name=feature_name, callback=_complete)
        )

    def disable_feature(self, feature_name, callback):
        """
        Disable the given feature by unsubscribing from the appropriate topics.

        :param feature_name: one of the feature name constants from constant.py
        :param callback: callback which is called when the feature is disabled

        :raises: ValueError if feature_name is invalid
        """
        logger.debug("disable_feature {} called".format(feature_name))
        if feature_name not in self.feature_enabled:
            raise ValueError("Invalid feature_name")
        # The flag is cleared eagerly, before the unsubscribe completes.
        self.feature_enabled[feature_name] = False

        def _complete(op, error):
            callback(error=error)

        self._pipeline.run_op(
            pipeline_ops_base.DisableFeatureOperation(feature_name=feature_name, callback=_complete)
        )

    @property
    def pipeline_configuration(self):
        """
        Pipeline Configuration for the pipeline. Note that while a new config object cannot be
        provided (read-only), the values stored in the config object CAN be changed.
        """
        return self._pipeline.pipeline_configuration

    @property
    def connected(self):
        """
        Read-only property to indicate if the transport is connected or not.
        """
        return self._pipeline.connected
+ """ + return self._pipeline.connected diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/mqtt_topic_iothub.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/mqtt_topic_iothub.py new file mode 100644 index 0000000..0c1542e --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/mqtt_topic_iothub.py @@ -0,0 +1,342 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/pipeline/mqtt_topic_iothub.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 15230 bytes +import logging +from datetime import date +import six.moves.urllib as urllib +from azure.iot.device.common import version_compat +logger = logging.getLogger(__name__) + +def _get_topic_base(device_id, module_id=None): + """ + return the string that is at the beginning of all topics for this + device/module + """ + topic = "devices/" + str(device_id) + if module_id: + topic = topic + "/modules/" + str(module_id) + return topic + + +def get_c2d_topic_for_subscribe(device_id): + """ + :return: The topic for cloud to device messages.It is of the format + "devices//messages/devicebound/#" + """ + return _get_topic_base(device_id) + "/messages/devicebound/#" + + +def get_input_topic_for_subscribe(device_id, module_id): + """ + :return: The topic for input messages. It is of the format + "devices//modules//inputs/#" + """ + return _get_topic_base(device_id, module_id) + "/inputs/#" + + +def get_method_topic_for_subscribe(): + """ + :return: The topic for ALL incoming methods. It is of the format + "$iothub/methods/POST/#" + """ + return "$iothub/methods/POST/#" + + +def get_twin_response_topic_for_subscribe(): + """ + :return: The topic for ALL incoming twin responses. 
It is of the format + "$iothub/twin/res/#" + """ + return "$iothub/twin/res/#" + + +def get_twin_patch_topic_for_subscribe(): + """ + :return: The topic for ALL incoming twin patches. It is of the format + "$iothub/twin/PATCH/properties/desired/# + """ + return "$iothub/twin/PATCH/properties/desired/#" + + +def get_telemetry_topic_for_publish(device_id, module_id): + """ + return the topic string used to publish telemetry + """ + return _get_topic_base(device_id, module_id) + "/messages/events/" + + +def get_method_topic_for_publish(request_id, status): + """ + :return: The topic for publishing method responses. It is of the format + "$iothub/methods/res//?$rid= + """ + return "$iothub/methods/res/{status}/?$rid={request_id}".format(status=urllib.parse.quote((str(status)), safe=""), + request_id=urllib.parse.quote((str(request_id)), safe="")) + + +def get_twin_topic_for_publish(method, resource_location, request_id): + """ + :return: The topic for publishing twin requests / patches. It is of the format + "$iothub/twin/?$rid= + """ + return "$iothub/twin/{method}{resource_location}?$rid={request_id}".format(method=method, + resource_location=resource_location, + request_id=urllib.parse.quote((str(request_id)), safe="")) + + +def is_c2d_topic(topic, device_id): + """ + Topics for c2d message are of the following format: + devices//messages/devicebound + :param topic: The topic string + """ + if "devices/{}/messages/devicebound".format(device_id) in topic: + return True + return False + + +def is_input_topic(topic, device_id, module_id): + """ + Topics for inputs are of the following format: + devices//modules//inputs/ + :param topic: The topic string + """ + return device_id and module_id or False + if "devices/{}/modules/{}/inputs/".format(device_id, module_id) in topic: + return True + return False + + +def is_method_topic(topic): + """ + Topics for methods are of the following format: + "$iothub/methods/POST/{method name}/?$rid={request id}" + + :param str topic: 
def is_twin_response_topic(topic):
    """Topics for twin responses are of the following format:
    $iothub/twin/res/{status}/?$rid={rid}

    :param str topic: The topic string
    """
    return topic.startswith("$iothub/twin/res/")


def is_twin_desired_property_patch_topic(topic):
    """Topics for twin desired property patches are of the following format:
    $iothub/twin/PATCH/properties/desired

    :param str topic: The topic string
    """
    return topic.startswith("$iothub/twin/PATCH/properties/desired")


def get_input_name_from_topic(topic):
    """
    Extract the input channel from the topic name.
    Topics for inputs are of the following format:
    devices/<deviceId>/modules/<moduleId>/inputs/<inputName>

    :param topic: The topic string
    :raises: ValueError if topic has incorrect format
    """
    segments = topic.split("/")
    if len(segments) > 5 and segments[4] == "inputs":
        return urllib.parse.unquote(segments[5])
    raise ValueError("topic has incorrect format")


def get_method_name_from_topic(topic):
    """
    Extract the method name from the method topic.
    Topics for methods are of the following format:
    "$iothub/methods/POST/{method name}/?$rid={request id}"

    :param str topic: The topic string
    :raises: ValueError if topic has incorrect format
    """
    segments = topic.split("/")
    if is_method_topic(topic) and len(segments) >= 4:
        return urllib.parse.unquote(segments[3])
    raise ValueError("topic has incorrect format")


def get_method_request_id_from_topic(topic):
    """
    Extract the Request ID (RID) from the method topic.
    Topics for methods are of the following format:
    "$iothub/methods/POST/{method name}/?$rid={request id}"

    :param str topic: the topic string
    :raises: ValueError if topic has incorrect format
    :returns: request id from topic string
    """
    if is_method_topic(topic) and len(topic.split("/")) >= 4:
        return _extract_properties(topic.split("?")[1])["rid"]
    raise ValueError("topic has incorrect format")


def get_twin_request_id_from_topic(topic):
    """
    Extract the Request ID (RID) from the twin response topic.
    Topics for twin response are in the following format:
    "$iothub/twin/res/{status}/?$rid={rid}"

    :param str topic: The topic string
    :raises: ValueError if topic has incorrect format
    :returns: request id from topic string
    """
    if is_twin_response_topic(topic) and len(topic.split("/")) >= 4:
        return _extract_properties(topic.split("?")[1])["rid"]
    raise ValueError("topic has incorrect format")


def get_twin_status_code_from_topic(topic):
    """
    Extract the status code from the twin response topic.
    Topics for twin response are in the following format:
    "$iothub/twin/res/{status}/?$rid={rid}"

    :param str topic: The topic string
    :raises: ValueError if the topic has incorrect format
    :returns: status code from topic string
    """
    segments = topic.split("/")
    if is_twin_response_topic(topic) and len(segments) >= 4:
        return urllib.parse.unquote(segments[3])
    raise ValueError("topic has incorrect format")
def extract_message_properties_from_topic(topic, message_received):
    """
    Extract key=value pairs from custom properties and set the properties on the received message.

    For extracting values corresponding to keys the following rules are followed:
    If there is NO "=", the value is None.
    If there is "=" with no value, the value is an empty string.
    For anything else the value after "=" and before "&" is considered as the proper value.

    :param topic: The topic string
    :param message_received: The message received with the payload in bytes
    :raises: ValueError if the topic is neither an input nor a devicebound topic
    """
    parts = topic.split("/")
    # FIX: restored the flat if/elif/else of the original source. The
    # decompiled nesting left `properties` unassigned for topics with too few
    # segments (or len(parts) > 4 with an unrecognized layout), producing an
    # UnboundLocalError below instead of the intended ValueError.
    if len(parts) > 4 and parts[4] == "inputs":
        # Input (module) topic: properties live in segment 6, if present.
        properties = parts[6] if len(parts) > 6 else None
    elif len(parts) > 3 and parts[3] == "devicebound":
        # C2D (device) topic: properties live in segment 4, if present.
        properties = parts[4] if len(parts) > 4 else None
    else:
        raise ValueError("topic has incorrect format")
    # System properties that are received but deliberately not surfaced.
    ignored_extraction_values = ["iothub-ack", "$.to"]
    if properties:
        for entry in properties.split("&"):
            pair = entry.split("=")
            key = urllib.parse.unquote(pair[0])
            value = urllib.parse.unquote(pair[1]) if len(pair) > 1 else None
            if key in ignored_extraction_values:
                continue
            if key == "$.mid":
                message_received.message_id = value
            elif key == "$.cid":
                message_received.correlation_id = value
            elif key == "$.uid":
                message_received.user_id = value
            elif key == "$.ct":
                message_received.content_type = value
            elif key == "$.ce":
                message_received.content_encoding = value
            elif key == "$.exp":
                message_received.expiry_time_utc = value
            else:
                message_received.custom_properties[key] = value
def encode_message_properties_in_topic(message_to_send, topic):
    """
    uri-encode the system properties of a message as key-value pairs on the topic with defined keys.
    Additionally if the message has user defined properties, the property keys and values shall be
    uri-encoded and appended at the end of the above topic with the following convention:
    '<key>=<value>&<key2>=<value2>&<key3>=<value3>(...)'

    :param message_to_send: The message to send
    :param topic: The topic which has not been encoded yet. For a device it looks like
        "devices/<deviceId>/messages/events/" and for a module it looks like
        "devices/<deviceId>/modules/<moduleId>/messages/events/"
    :return: The topic which has been uri-encoded
    :raises: ValueError if there are duplicate keys in the custom properties
    """
    system_properties = []
    if message_to_send.output_name:
        system_properties.append(("$.on", str(message_to_send.output_name)))
    # FIX: this was `elif` in the decompiled source, which silently dropped the
    # message id whenever an output name was present. Each system property is
    # independent, so each gets its own `if` (matching all the others below).
    if message_to_send.message_id:
        system_properties.append(("$.mid", str(message_to_send.message_id)))
    if message_to_send.correlation_id:
        system_properties.append(("$.cid", str(message_to_send.correlation_id)))
    if message_to_send.user_id:
        system_properties.append(("$.uid", str(message_to_send.user_id)))
    if message_to_send.content_type:
        system_properties.append(("$.ct", str(message_to_send.content_type)))
    if message_to_send.content_encoding:
        system_properties.append(("$.ce", str(message_to_send.content_encoding)))
    if message_to_send.iothub_interface_id:
        system_properties.append(("$.ifid", str(message_to_send.iothub_interface_id)))
    if message_to_send.expiry_time_utc:
        # datetime/date values are serialized to ISO-8601; strings pass through.
        system_properties.append(
            (
                "$.exp",
                message_to_send.expiry_time_utc.isoformat()
                if isinstance(message_to_send.expiry_time_utc, date)
                else message_to_send.expiry_time_utc,
            )
        )
    system_properties_encoded = version_compat.urlencode(
        system_properties, quote_via=urllib.parse.quote
    )
    topic += system_properties_encoded
    if message_to_send.custom_properties:
        # Join system and custom property sections with "&".
        if system_properties:
            topic += "&"
        custom_prop_seq = [
            (str(i[0]), str(i[1])) for i in list(message_to_send.custom_properties.items())
        ]
        custom_prop_seq.sort()
        keys = [i[0] for i in custom_prop_seq]
        if len(keys) != len(set(keys)):
            raise ValueError("Duplicate keys in custom properties!")
        user_properties_encoded = version_compat.urlencode(
            custom_prop_seq, quote_via=urllib.parse.quote
        )
        topic += user_properties_encoded
    return topic
def _extract_properties(properties_str):
    """Return a dictionary of properties from a string in the format
    ${key1}={value1}&${key2}={value2}...&${keyn}={valuen}
    """
    extracted = {}
    for entry in properties_str.split("&"):
        pieces = entry.split("=")
        # Leading "$" is stripped so e.g. "$rid" and "rid" land on the same key.
        extracted[urllib.parse.unquote(pieces[0]).lstrip("$")] = urllib.parse.unquote(pieces[1])
    return extracted


class C2DMessageEvent(PipelineEvent):
    """
    A PipelineEvent object which represents an incoming C2D event. This object is probably
    created by some converter stage based on a protocol-specific event.
    """

    def __init__(self, message):
        """
        Initializer for C2DMessageEvent objects.

        :param Message message: The Message object for the message that was received.
        """
        super(C2DMessageEvent, self).__init__()
        self.message = message
This message + is expected to have had the .input_name attribute set + """ + super(InputMessageEvent, self).__init__() + self.message = message + + +class MethodRequestEvent(PipelineEvent): + __doc__ = "\n A PipelineEvent object which represents an incoming MethodRequest event.\n This object is probably created by some converter stage based on a protocol-specific event.\n " + + def __init__(self, method_request): + super(MethodRequestEvent, self).__init__() + self.method_request = method_request + + +class TwinDesiredPropertiesPatchEvent(PipelineEvent): + __doc__ = "\n A PipelineEvent object which represents an incoming twin desired properties patch. This\n object is probably created by some converter stage based on a protocol-specific event.\n " + + def __init__(self, patch): + super(TwinDesiredPropertiesPatchEvent, self).__init__() + self.patch = patch diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/pipeline_ops_iothub.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/pipeline_ops_iothub.py new file mode 100644 index 0000000..8adee2e --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/pipeline_ops_iothub.py @@ -0,0 +1,83 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/pipeline/pipeline_ops_iothub.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 4380 bytes +from azure.iot.device.common.pipeline import PipelineOperation + +class SendD2CMessageOperation(PipelineOperation): + __doc__ = "\n A PipelineOperation object which contains arguments used to send a telemetry message to an IoTHub or EdegHub server.\n\n This operation is in the group of IoTHub operations because it is very specific to the IoTHub client\n " + + def __init__(self, message, callback): + """ + Initializer for 
SendD2CMessageOperation objects. + + :param Message message: The message that we're sending to the service + :param Function callback: The function that gets called when this operation is complete or has failed. + The callback function must accept A PipelineOperation object which indicates the specific operation which + has completed or failed. + """ + super(SendD2CMessageOperation, self).__init__(callback=callback) + self.message = message + + +class SendOutputMessageOperation(PipelineOperation): + __doc__ = "\n A PipelineOperation object which contains arguments used to send an output message to an EdgeHub server.\n\n This operation is in the group of IoTHub operations because it is very specific to the IoTHub client\n " + + def __init__(self, message, callback): + """ + Initializer for SendOutputMessageOperation objects. + + :param Message message: The output message that we're sending to the service. The name of the output is + expected to be stored in the output_name attribute of this object + :param Function callback: The function that gets called when this operation is complete or has failed. + The callback function must accept A PipelineOperation object which indicates the specific operation which + has completed or failed. + """ + super(SendOutputMessageOperation, self).__init__(callback=callback) + self.message = message + + +class SendMethodResponseOperation(PipelineOperation): + __doc__ = "\n A PipleineOperation object which contains arguments used to send a method response to an IoTHub or EdgeHub server.\n\n This operation is in the group of IoTHub operations because it is very specific to the IoTHub client.\n " + + def __init__(self, method_response, callback): + """ + Initializer for SendMethodResponseOperation objects. + + :param method_response: The method response to be sent to IoTHub/EdgeHub + :type method_response: MethodResponse + :param callback: The function that gets called when this operation is complete or has failed. 
+ The callback function must accept a PipelineOperation object which indicates the specific operation has which + has completed or failed. + :type callback: Function/callable + """ + super(SendMethodResponseOperation, self).__init__(callback=callback) + self.method_response = method_response + + +class GetTwinOperation(PipelineOperation): + __doc__ = "\n A PipelineOperation object which represents a request to get a device twin or a module twin from an Azure\n IoT Hub or Azure Iot Edge Hub service.\n\n :ivar twin: Upon completion, this contains the twin which was retrieved from the service.\n :type twin: Twin\n " + + def __init__(self, callback): + """ + Initializer for GetTwinOperation objects. + """ + super(GetTwinOperation, self).__init__(callback=callback) + self.twin = None + + +class PatchTwinReportedPropertiesOperation(PipelineOperation): + __doc__ = "\n A PipelineOperation object which contains arguments used to send a reported properties patch to the Azure\n IoT Hub or Azure IoT Edge Hub service.\n " + + def __init__(self, patch, callback): + """ + Initializer for PatchTwinReportedPropertiesOperation object + + :param patch: The reported properties patch to send to the service. 
+ :type patch: dict, str, int, float, bool, or None (JSON compatible values) + """ + super(PatchTwinReportedPropertiesOperation, self).__init__(callback=callback) + self.patch = patch diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/pipeline_ops_iothub_http.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/pipeline_ops_iothub_http.py new file mode 100644 index 0000000..e7ec411 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/pipeline_ops_iothub_http.py @@ -0,0 +1,72 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/pipeline/pipeline_ops_iothub_http.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 3939 bytes +from azure.iot.device.common.pipeline import PipelineOperation + +class MethodInvokeOperation(PipelineOperation): + __doc__ = "\n A PipleineOperation object which contains arguments used to send a method invoke to an IoTHub or EdgeHub server.\n\n This operation is in the group of EdgeHub operations because it is very specific to the EdgeHub client.\n " + + def __init__(self, target_device_id, target_module_id, method_params, callback): + """ + Initializer for MethodInvokeOperation objects. + + :param str target_device_id: The device id of the target device/module + :param str target_module_id: The module id of the target module + :param method_params: The parameters used to invoke the method, as defined by the IoT Hub specification. + :param callback: The function that gets called when this operation is complete or has failed. + The callback function must accept a PipelineOperation object which indicates the specific operation has which + has completed or failed. 
+ :type callback: Function/callable + """ + super(MethodInvokeOperation, self).__init__(callback=callback) + self.target_device_id = target_device_id + self.target_module_id = target_module_id + self.method_params = method_params + self.method_response = None + + +class GetStorageInfoOperation(PipelineOperation): + __doc__ = "\n A PipleineOperation object which contains arguments used to get the storage information from IoT Hub.\n " + + def __init__(self, blob_name, callback): + """ + Initializer for GetStorageInfo objects. + + :param str blob_name: The name of the blob that will be created in Azure Storage + :param callback: The function that gets called when this operation is complete or has failed. + The callback function must accept a PipelineOperation object which indicates the specific operation has which + has completed or failed. + :type callback: Function/callable + + :ivar dict storage_info: Upon completion, this contains the storage information which was retrieved from the service. + """ + super(GetStorageInfoOperation, self).__init__(callback=callback) + self.blob_name = blob_name + self.storage_info = None + + +class NotifyBlobUploadStatusOperation(PipelineOperation): + __doc__ = "\n A PipleineOperation object which contains arguments used to get the storage information from IoT Hub.\n " + + def __init__(self, correlation_id, is_success, status_code, status_description, callback): + """ + Initializer for GetStorageInfo objects. + + :param str correlation_id: Provided by IoT Hub on get_storage_info_for_blob request. + :param bool is_success: A boolean that indicates whether the file was uploaded successfully. + :param int request_status_code: A numeric status code that is the status for the upload of the fiel to storage. + :param str status_description: A description that corresponds to the status_code. + :param callback: The function that gets called when this operation is complete or has failed. 
+ The callback function must accept a PipelineOperation object which indicates the specific operation has which + has completed or failed. + :type callback: Function/callable + """ + super(NotifyBlobUploadStatusOperation, self).__init__(callback=callback) + self.correlation_id = correlation_id + self.is_success = is_success + self.request_status_code = status_code + self.status_description = status_description diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/pipeline_stages_iothub.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/pipeline_stages_iothub.py new file mode 100644 index 0000000..f36aa4d --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/pipeline_stages_iothub.py @@ -0,0 +1,128 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/pipeline/pipeline_stages_iothub.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 9111 bytes +import json, logging +from azure.iot.device.common.pipeline import pipeline_events_base, pipeline_ops_base, PipelineStage, pipeline_thread +from azure.iot.device import exceptions +from azure.iot.device.common import handle_exceptions +from azure.iot.device.common.callable_weak_method import CallableWeakMethod +from . import pipeline_events_iothub, pipeline_ops_iothub +from . 
import constant +logger = logging.getLogger(__name__) + +class EnsureDesiredPropertiesStage(PipelineStage): + __doc__ = "\n Pipeline stage Responsible for making sure that desired properties are always kept up to date.\n It does this by sending diwn a GetTwinOperation after a connection is reestablished, and, if\n the desired properties have changed since the last time a patch was received, it will send up\n an artificial patch event to send those updated properties to the app.\n " + + def __init__(self): + self.last_version_seen = None + self.pending_get_request = None + super(EnsureDesiredPropertiesStage, self).__init__() + + @pipeline_thread.runs_on_pipeline_thread + def _run_op(self, op): + if isinstance(op, pipeline_ops_base.EnableFeatureOperation): + if op.feature_name == constant.TWIN_PATCHES: + logger.debug("{}: enabling twin patches. setting last_version_seen".format(self.name)) + self.last_version_seen = -1 + self.send_op_down(op) + + @pipeline_thread.runs_on_pipeline_thread + def _ensure_get_op(self): + """ + Function which makes sure we have a GetTwin operation in progress. If we've + already sent one down and we're waiting for it to return, we don't want to send + a new one down. This is because layers below us (especially CoordinateRequestAndResponseStage) + will do everything they can to ensure we get a response on the already-pending + GetTwinOperation. + """ + if not self.pending_get_request: + logger.info("{}: sending twin GET to ensure freshness".format(self.name)) + self.pending_get_request = pipeline_ops_iothub.GetTwinOperation(callback=(CallableWeakMethod(self, "_on_get_twin_complete"))) + self.send_op_down(self.pending_get_request) + else: + logger.debug("{}: Outstanding twin GET already exists. Not sending anything".format(self.name)) + + @pipeline_thread.runs_on_pipeline_thread + def _on_get_twin_complete(self, op, error): + """ + Function that gets called when a GetTwinOperation _that_we_initiated_ is complete. 
+ This is where we compare $version values and decide if we want to create an artificial + TwinDesiredPropertiesPatchEvent or not. + """ + self.pending_get_request = None + if error: + logger.debug("{}: Twin GET failed with error {}. Resubmitting.".format(self, error)) + self._ensure_get_op() + else: + logger.debug("{} Twin GET response received. Checking versions".format(self)) + new_version = op.twin["desired"]["$version"] + logger.debug("{}: old version = {}, new version = {}".format(self.name, self.last_version_seen, new_version)) + if self.last_version_seen != new_version: + logger.debug("{}: Version changed. Sending up new patch event".format(self.name)) + self.last_version_seen = new_version + self.send_event_up(pipeline_events_iothub.TwinDesiredPropertiesPatchEvent(op.twin["desired"])) + + @pipeline_thread.runs_on_pipeline_thread + def _handle_pipeline_event(self, event): + if isinstance(event, pipeline_events_iothub.TwinDesiredPropertiesPatchEvent): + version = event.patch["$version"] + logger.debug("{}: Desired patch received. Saving $version={}".format(self.name, version)) + self.last_version_seen = version + else: + if isinstance(event, pipeline_events_base.ConnectedEvent): + if self.last_version_seen: + logger.info("{}: Reconnected. Getting twin") + self._ensure_get_op() + self.send_event_up(event) + + +class TwinRequestResponseStage(PipelineStage): + __doc__ = "\n PipelineStage which handles twin operations. In particular, it converts twin GET and PATCH\n operations into RequestAndResponseOperation operations. This is done at the IoTHub level because\n there is nothing protocol-specific about this code. 
The protocol-specific implementation\n for twin requests and responses is handled inside IoTHubMQTTTranslationStage, when it converts\n the RequestOperation to a protocol-specific send operation and when it converts the\n protocol-specific receive event into an ResponseEvent event.\n " + + @pipeline_thread.runs_on_pipeline_thread + def _run_op(self, op): + + def map_twin_error(error, twin_op): + if error: + return error + if twin_op.status_code >= 300: + logger.info("Error {} received from twin operation".format(twin_op.status_code)) + logger.info("response body: {}".format(twin_op.response_body)) + return exceptions.ServiceError("twin operation returned status {}".format(twin_op.status_code)) + + if isinstance(op, pipeline_ops_iothub.GetTwinOperation): + op_waiting_for_response = op + + def on_twin_response(op, error): + logger.debug("{}({}): Got response for GetTwinOperation".format(self.name, op.name)) + error = map_twin_error(error=error, twin_op=op) + if not error: + op_waiting_for_response.twin = json.loads(op.response_body.decode("utf-8")) + op_waiting_for_response.complete(error=error) + + self.send_op_down(pipeline_ops_base.RequestAndResponseOperation(request_type=(constant.TWIN), + method="GET", + resource_location="/", + request_body=" ", + callback=on_twin_response)) + else: + if isinstance(op, pipeline_ops_iothub.PatchTwinReportedPropertiesOperation): + op_waiting_for_response = op + + def on_twin_response(op, error): + logger.debug("{}({}): Got response for PatchTwinReportedPropertiesOperation operation".format(self.name, op.name)) + error = map_twin_error(error=error, twin_op=op) + op_waiting_for_response.complete(error=error) + + logger.debug("{}({}): Sending reported properties patch: {}".format(self.name, op.name, op.patch)) + self.send_op_down(pipeline_ops_base.RequestAndResponseOperation(request_type=(constant.TWIN), + method="PATCH", + resource_location="/properties/reported/", + request_body=(json.dumps(op.patch)), + 
callback=on_twin_response)) + else: + super(TwinRequestResponseStage, self)._run_op(op) diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/pipeline_stages_iothub_http.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/pipeline_stages_iothub_http.py new file mode 100644 index 0000000..6de27c2 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/pipeline_stages_iothub_http.py @@ -0,0 +1,114 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/pipeline/pipeline_stages_iothub_http.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 8258 bytes +import logging, json +import six.moves.urllib as urllib +from azure.iot.device.common.pipeline import pipeline_events_base, pipeline_ops_base, pipeline_ops_http, PipelineStage, pipeline_thread +from . import pipeline_ops_iothub, pipeline_ops_iothub_http, http_path_iothub, http_map_error +from azure.iot.device import exceptions +from azure.iot.device import constant as pkg_constant +from azure.iot.device import user_agent +logger = logging.getLogger(__name__) + +@pipeline_thread.runs_on_pipeline_thread +def map_http_error(error, http_op): + if error: + return error + if http_op.status_code >= 300: + translated_error = http_map_error.translate_error(http_op.status_code, http_op.reason) + return exceptions.ServiceError("HTTP operation returned: {} {}".format(http_op.status_code, translated_error)) + + +class IoTHubHTTPTranslationStage(PipelineStage): + __doc__ = "\n PipelineStage which converts other Iot and EdgeHub operations into HTTP operations. 
This stage also\n converts http pipeline events into Iot and EdgeHub pipeline events.\n " + + @pipeline_thread.runs_on_pipeline_thread + def _run_op(self, op): + if isinstance(op, pipeline_ops_iothub_http.MethodInvokeOperation): + logger.debug("{}({}): Translating Method Invoke Operation for HTTP.".format(self.name, op.name)) + query_params = "api-version={apiVersion}".format(apiVersion=(pkg_constant.IOTHUB_API_VERSION)) + body = json.dumps(op.method_params) + path = http_path_iothub.get_method_invoke_path(op.target_device_id, op.target_module_id) + x_ms_edge_string = "{deviceId}/{moduleId}".format(deviceId=(self.pipeline_root.pipeline_configuration.device_id), + moduleId=(self.pipeline_root.pipeline_configuration.module_id)) + user_agent_string = urllib.parse.quote_plus(user_agent.get_iothub_user_agent() + str(self.pipeline_root.pipeline_configuration.product_info)) + headers = {'Host':(self.pipeline_root.pipeline_configuration).gateway_hostname, + 'Content-Type':"application/json", + 'Content-Length':len(str(body)), + 'x-ms-edge-moduleId':x_ms_edge_string, + 'User-Agent':user_agent_string} + op_waiting_for_response = op + + def on_request_response(op, error): + logger.debug("{}({}): Got response for MethodInvokeOperation".format(self.name, op.name)) + error = map_http_error(error=error, http_op=op) + if not error: + op_waiting_for_response.method_response = json.loads(op.response_body.decode("utf-8")) + op_waiting_for_response.complete(error=error) + + self.send_op_down(pipeline_ops_http.HTTPRequestAndResponseOperation(method="POST", + path=path, + headers=headers, + body=body, + query_params=query_params, + callback=on_request_response)) + else: + if isinstance(op, pipeline_ops_iothub_http.GetStorageInfoOperation): + logger.debug("{}({}): Translating Get Storage Info Operation to HTTP.".format(self.name, op.name)) + query_params = "api-version={apiVersion}".format(apiVersion=(pkg_constant.IOTHUB_API_VERSION)) + path = 
http_path_iothub.get_storage_info_for_blob_path(self.pipeline_root.pipeline_configuration.device_id) + body = json.dumps({"blobName": (op.blob_name)}) + user_agent_string = urllib.parse.quote_plus(user_agent.get_iothub_user_agent() + str(self.pipeline_root.pipeline_configuration.product_info)) + headers = {'Host':(self.pipeline_root.pipeline_configuration).hostname, + 'Accept':"application/json", + 'Content-Type':"application/json", + 'Content-Length':len(str(body)), + 'User-Agent':user_agent_string} + op_waiting_for_response = op + + def on_request_response(op, error): + logger.debug("{}({}): Got response for GetStorageInfoOperation".format(self.name, op.name)) + error = map_http_error(error=error, http_op=op) + if not error: + op_waiting_for_response.storage_info = json.loads(op.response_body.decode("utf-8")) + op_waiting_for_response.complete(error=error) + + self.send_op_down(pipeline_ops_http.HTTPRequestAndResponseOperation(method="POST", + path=path, + headers=headers, + body=body, + query_params=query_params, + callback=on_request_response)) + else: + if isinstance(op, pipeline_ops_iothub_http.NotifyBlobUploadStatusOperation): + logger.debug("{}({}): Translating Get Storage Info Operation to HTTP.".format(self.name, op.name)) + query_params = "api-version={apiVersion}".format(apiVersion=(pkg_constant.IOTHUB_API_VERSION)) + path = http_path_iothub.get_notify_blob_upload_status_path(self.pipeline_root.pipeline_configuration.device_id) + body = json.dumps({'correlationId':op.correlation_id, + 'isSuccess':op.is_success, + 'statusCode':op.request_status_code, + 'statusDescription':op.status_description}) + user_agent_string = urllib.parse.quote_plus(user_agent.get_iothub_user_agent() + str(self.pipeline_root.pipeline_configuration.product_info)) + headers = {'Host':(self.pipeline_root.pipeline_configuration).hostname, + 'Content-Type':"application/json; charset=utf-8", + 'Content-Length':len(str(body)), + 'User-Agent':user_agent_string} + op_waiting_for_response 
= op + + def on_request_response(op, error): + logger.debug("{}({}): Got response for GetStorageInfoOperation".format(self.name, op.name)) + error = map_http_error(error=error, http_op=op) + op_waiting_for_response.complete(error=error) + + self.send_op_down(pipeline_ops_http.HTTPRequestAndResponseOperation(method="POST", + path=path, + headers=headers, + body=body, + query_params=query_params, + callback=on_request_response)) + else: + self.send_op_down(op) diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/pipeline_stages_iothub_mqtt.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/pipeline_stages_iothub_mqtt.py new file mode 100644 index 0000000..f7f8416 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/pipeline/pipeline_stages_iothub_mqtt.py @@ -0,0 +1,149 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/pipeline/pipeline_stages_iothub_mqtt.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 11208 bytes +import logging, json +from six.moves import urllib +from azure.iot.device.common import version_compat +from azure.iot.device.common.pipeline import pipeline_events_base, pipeline_ops_base, pipeline_ops_mqtt, pipeline_events_mqtt, PipelineStage, pipeline_thread +from azure.iot.device.iothub.models import Message, MethodRequest +from . import pipeline_ops_iothub, pipeline_events_iothub, mqtt_topic_iothub +from . import constant as pipeline_constant +from . import exceptions as pipeline_exceptions +from azure.iot.device import constant as pkg_constant +from azure.iot.device import user_agent +logger = logging.getLogger(__name__) + +class IoTHubMQTTTranslationStage(PipelineStage): + __doc__ = "\n PipelineStage which converts other Iot and IoTHub operations into MQTT operations. 
This stage also\n converts mqtt pipeline events into Iot and IoTHub pipeline events.\n " + + @pipeline_thread.runs_on_pipeline_thread + def _run_op(self, op): + if isinstance(op, pipeline_ops_base.InitializePipelineOperation): + if self.pipeline_root.pipeline_configuration.module_id: + client_id = "{}/{}".format(self.pipeline_root.pipeline_configuration.device_id, self.pipeline_root.pipeline_configuration.module_id) + else: + client_id = self.pipeline_root.pipeline_configuration.device_id + query_param_seq = [] + custom_product_info = str(self.pipeline_root.pipeline_configuration.product_info) + if custom_product_info.startswith(pkg_constant.DIGITAL_TWIN_PREFIX): + query_param_seq.append(("api-version", pkg_constant.DIGITAL_TWIN_API_VERSION)) + query_param_seq.append(("DeviceClientType", user_agent.get_iothub_user_agent())) + query_param_seq.append(( + pkg_constant.DIGITAL_TWIN_QUERY_HEADER, custom_product_info)) + else: + query_param_seq.append(("api-version", pkg_constant.IOTHUB_API_VERSION)) + query_param_seq.append(( + "DeviceClientType", user_agent.get_iothub_user_agent() + custom_product_info)) + username = "{hostname}/{client_id}/?{query_params}".format(hostname=(self.pipeline_root.pipeline_configuration.hostname), + client_id=client_id, + query_params=version_compat.urlencode(query_param_seq, + quote_via=(urllib.parse.quote))) + op.username = username + op.client_id = client_id + self.send_op_down(op) + else: + if isinstance(op, pipeline_ops_iothub.SendD2CMessageOperation) or isinstance(op, pipeline_ops_iothub.SendOutputMessageOperation): + telemetry_topic = mqtt_topic_iothub.get_telemetry_topic_for_publish(device_id=(self.pipeline_root.pipeline_configuration.device_id), + module_id=(self.pipeline_root.pipeline_configuration.module_id)) + topic = mqtt_topic_iothub.encode_message_properties_in_topic(op.message, telemetry_topic) + worker_op = op.spawn_worker_op(worker_op_type=(pipeline_ops_mqtt.MQTTPublishOperation), + topic=topic, + 
payload=(op.message.data)) + self.send_op_down(worker_op) + else: + if isinstance(op, pipeline_ops_iothub.SendMethodResponseOperation): + topic = mqtt_topic_iothub.get_method_topic_for_publish(op.method_response.request_id, op.method_response.status) + payload = json.dumps(op.method_response.payload) + worker_op = op.spawn_worker_op(worker_op_type=(pipeline_ops_mqtt.MQTTPublishOperation), + topic=topic, + payload=payload) + self.send_op_down(worker_op) + else: + if isinstance(op, pipeline_ops_base.EnableFeatureOperation): + topic = self._get_feature_subscription_topic(op.feature_name) + worker_op = op.spawn_worker_op(worker_op_type=(pipeline_ops_mqtt.MQTTSubscribeOperation), + topic=topic) + self.send_op_down(worker_op) + else: + if isinstance(op, pipeline_ops_base.DisableFeatureOperation): + topic = self._get_feature_subscription_topic(op.feature_name) + worker_op = op.spawn_worker_op(worker_op_type=(pipeline_ops_mqtt.MQTTUnsubscribeOperation), + topic=topic) + self.send_op_down(worker_op) + else: + if isinstance(op, pipeline_ops_base.RequestOperation): + if op.request_type == pipeline_constant.TWIN: + topic = mqtt_topic_iothub.get_twin_topic_for_publish(method=(op.method), + resource_location=(op.resource_location), + request_id=(op.request_id)) + worker_op = op.spawn_worker_op(worker_op_type=(pipeline_ops_mqtt.MQTTPublishOperation), + topic=topic, + payload=(op.request_body)) + self.send_op_down(worker_op) + else: + raise pipeline_exceptions.OperationError("RequestOperation request_type {} not supported".format(op.request_type)) + else: + super(IoTHubMQTTTranslationStage, self)._run_op(op) + + @pipeline_thread.runs_on_pipeline_thread + def _get_feature_subscription_topic(self, feature): + if feature == pipeline_constant.C2D_MSG: + return mqtt_topic_iothub.get_c2d_topic_for_subscribe(self.pipeline_root.pipeline_configuration.device_id) + if feature == pipeline_constant.INPUT_MSG: + return 
mqtt_topic_iothub.get_input_topic_for_subscribe(self.pipeline_root.pipeline_configuration.device_id, self.pipeline_root.pipeline_configuration.module_id) + if feature == pipeline_constant.METHODS: + return mqtt_topic_iothub.get_method_topic_for_subscribe() + if feature == pipeline_constant.TWIN: + return mqtt_topic_iothub.get_twin_response_topic_for_subscribe() + if feature == pipeline_constant.TWIN_PATCHES: + return mqtt_topic_iothub.get_twin_patch_topic_for_subscribe() + logger.error("Cannot retrieve MQTT topic for subscription to invalid feature") + raise pipeline_exceptions.OperationError("Trying to enable/disable invalid feature - {}".format(feature)) + + @pipeline_thread.runs_on_pipeline_thread + def _handle_pipeline_event(self, event): + """ + Pipeline Event handler function to convert incoming MQTT messages into the appropriate IoTHub + events, based on the topic of the message + """ + if isinstance(event, pipeline_events_mqtt.IncomingMQTTMessageEvent): + topic = event.topic + device_id = self.pipeline_root.pipeline_configuration.device_id + module_id = self.pipeline_root.pipeline_configuration.module_id + if mqtt_topic_iothub.is_c2d_topic(topic, device_id): + message = Message(event.payload) + mqtt_topic_iothub.extract_message_properties_from_topic(topic, message) + self.send_event_up(pipeline_events_iothub.C2DMessageEvent(message)) + else: + if mqtt_topic_iothub.is_input_topic(topic, device_id, module_id): + message = Message(event.payload) + mqtt_topic_iothub.extract_message_properties_from_topic(topic, message) + message.input_name = mqtt_topic_iothub.get_input_name_from_topic(topic) + self.send_event_up(pipeline_events_iothub.InputMessageEvent(message)) + else: + if mqtt_topic_iothub.is_method_topic(topic): + request_id = mqtt_topic_iothub.get_method_request_id_from_topic(topic) + method_name = mqtt_topic_iothub.get_method_name_from_topic(topic) + method_received = MethodRequest(request_id=request_id, + name=method_name, + 
payload=(json.loads(event.payload.decode("utf-8")))) + self.send_event_up(pipeline_events_iothub.MethodRequestEvent(method_received)) + else: + if mqtt_topic_iothub.is_twin_response_topic(topic): + request_id = mqtt_topic_iothub.get_twin_request_id_from_topic(topic) + status_code = int(mqtt_topic_iothub.get_twin_status_code_from_topic(topic)) + self.send_event_up(pipeline_events_base.ResponseEvent(request_id=request_id, + status_code=status_code, + response_body=(event.payload))) + else: + if mqtt_topic_iothub.is_twin_desired_property_patch_topic(topic): + self.send_event_up(pipeline_events_iothub.TwinDesiredPropertiesPatchEvent(patch=(json.loads(event.payload.decode("utf-8"))))) + else: + logger.debug("Unknown topic: {} passing up to next handler".format(topic)) + self.send_event_up(event) + else: + self.send_event_up(event) diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/sync_clients.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/sync_clients.py new file mode 100644 index 0000000..915291c --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/sync_clients.py @@ -0,0 +1,627 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/sync_clients.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 33436 bytes +"""This module contains user-facing synchronous clients for the +Azure IoTHub Device SDK for Python. +""" +import logging, deprecation +from .abstract_clients import AbstractIoTHubClient, AbstractIoTHubDeviceClient, AbstractIoTHubModuleClient +from .models import Message +from .inbox_manager import InboxManager +from .sync_inbox import SyncClientInbox, InboxEmpty +from . 
def handle_result(callback):
    """Block until the given EventedCallback completes, translating
    pipeline-level errors into the corresponding user-facing
    azure.iot.device exceptions.

    :param callback: An EventedCallback completed by a pipeline operation.
    :returns: The result of the pipeline operation, if any.
    :raises: An azure.iot.device.exceptions error wrapping the pipeline error.
    """
    try:
        return callback.wait_for_completion()
    except pipeline_exceptions.ConnectionDroppedError as e:
        raise exceptions.ConnectionDroppedError(message="Lost connection to IoTHub", cause=e)
    except pipeline_exceptions.ConnectionFailedError as e:
        raise exceptions.ConnectionFailedError(message="Could not connect to IoTHub", cause=e)
    except pipeline_exceptions.UnauthorizedError as e:
        raise exceptions.CredentialError(message="Credentials invalid, could not connect", cause=e)
    except pipeline_exceptions.ProtocolClientError as e:
        raise exceptions.ClientError(message="Error in the IoTHub client", cause=e)
    except pipeline_exceptions.TlsExchangeAuthError as e:
        raise exceptions.ClientError(message="Error in the IoTHub client due to TLS exchanges.", cause=e)
    except pipeline_exceptions.ProtocolProxyError as e:
        raise exceptions.ClientError(message="Error in the IoTHub client raised due to proxy connections.", cause=e)
    except Exception as e:
        # Catch-all boundary: surface anything unexpected as a ClientError.
        raise exceptions.ClientError(message="Unexpected failure", cause=e)
for a generic synchronous client. + + This initializer should not be called directly. + Instead, use one of the 'create_from_' classmethods to instantiate + + :param mqtt_pipeline: The MQTTPipeline used for the client + :type mqtt_pipeline: :class:`azure.iot.device.iothub.pipeline.MQTTPipeline` + :param http_pipeline: The HTTPPipeline used for the client + :type http_pipeline: :class:`azure.iot.device.iothub.pipeline.HTTPPipeline` + """ + (super(GenericIoTHubClient, self).__init__)(**kwargs) + self._inbox_manager = InboxManager(inbox_type=SyncClientInbox) + self._handler_manager = sync_handler_manager.SyncHandlerManager(self._inbox_manager) + self._mqtt_pipeline.on_connected = CallableWeakMethod(self, "_on_connected") + self._mqtt_pipeline.on_disconnected = CallableWeakMethod(self, "_on_disconnected") + self._mqtt_pipeline.on_method_request_received = CallableWeakMethod(self._inbox_manager, "route_method_request") + self._mqtt_pipeline.on_twin_patch_received = CallableWeakMethod(self._inbox_manager, "route_twin_patch") + + def _enable_feature(self, feature_name): + """Enable an Azure IoT Hub feature. + + This is a synchronous call, meaning that this function will not return until the feature + has been enabled. + + :param feature_name: The name of the feature to enable. + See azure.iot.device.common.pipeline.constant for possible values + """ + logger.info("Enabling feature:" + feature_name + "...") + if not self._mqtt_pipeline.feature_enabled[feature_name]: + callback = EventedCallback() + self._mqtt_pipeline.enable_feature(feature_name, callback=callback) + callback.wait_for_completion() + logger.info("Successfully enabled feature:" + feature_name) + else: + logger.info("Feature ({}) already disabled - skipping".format(feature_name)) + + def _disable_feature(self, feature_name): + """Disable an Azure IoT Hub feature + + This is a synchronous call, meaning that this function will not return until the feature + has been disabled. 
+ + :param feature_name: The name of the feature to disable. + See azure.iot.device.common.pipeline.constant for possible values + """ + logger.info("Disabling feature: {}...".format(feature_name)) + if self._mqtt_pipeline.feature_enabled[feature_name]: + callback = EventedCallback() + self._mqtt_pipeline.disable_feature(feature_name, callback=callback) + callback.wait_for_completion() + logger.info("Successfully disabled feature: {}".format(feature_name)) + else: + logger.info("Feature ({}) already disabled - skipping".format(feature_name)) + + def connect(self): + """Connects the client to an Azure IoT Hub or Azure IoT Edge Hub instance. + + The destination is chosen based on the credentials passed via the auth_provider parameter + that was provided when this object was initialized. + + This is a synchronous call, meaning that this function will not return until the connection + to the service has been completely established. + + :raises: :class:`azure.iot.device.exceptions.CredentialError` if credentials are invalid + and a connection cannot be established. + :raises: :class:`azure.iot.device.exceptions.ConnectionFailedError` if a establishing a + connection results in failure. + :raises: :class:`azure.iot.device.exceptions.ConnectionDroppedError` if connection is lost + during execution. + :raises: :class:`azure.iot.device.exceptions.ClientError` if there is an unexpected failure + during execution. + """ + logger.info("Connecting to Hub...") + callback = EventedCallback() + self._mqtt_pipeline.connect(callback=callback) + handle_result(callback) + logger.info("Successfully connected to Hub") + + def disconnect(self): + """Disconnect the client from the Azure IoT Hub or Azure IoT Edge Hub instance. + + It is recommended that you make sure to call this function when you are completely done + with the your client instance. + + This is a synchronous call, meaning that this function will not return until the connection + to the service has been completely closed. 
+ + :raises: :class:`azure.iot.device.exceptions.ClientError` if there is an unexpected failure + during execution. + """ + logger.info("Disconnecting from Hub...") + logger.debug("Executing initial disconnect") + callback = EventedCallback() + self._mqtt_pipeline.disconnect(callback=callback) + handle_result(callback) + logger.debug("Successfully executed initial disconnect") + logger.debug("Stopping handlers...") + self._handler_manager.stop() + logger.debug("Successfully stopped handlers") + logger.debug("Executing secondary disconnect...") + callback = EventedCallback() + self._mqtt_pipeline.disconnect(callback=callback) + handle_result(callback) + logger.debug("Successfully executed secondary disconnect") + logger.info("Successfully disconnected from Hub") + + def update_sastoken(self, sastoken): + """ + Update the client's SAS Token used for authentication, then reauthorizes the connection. + + This API can only be used if the client was initially created with a SAS Token. + Note also that this API may return before the reauthorization/reconnection is completed. + This means that some errors that may occur as part of the reconnection could occur in the + background, and will not be raised by this method. + + :param str sastoken: The new SAS Token string for the client to use + + :raises: :class:`azure.iot.device.exceptions.ClientError` if the client was not initially + created with a SAS token. + :raises: :class:`azure.iot.device.exceptions.ClientError` if there is an unexpected failure + during execution. 
+ :raises: ValueError if the sastoken parameter is invalid + """ + self._replace_user_supplied_sastoken(sastoken) + logger.info("Reauthorizing connection with Hub...") + callback = EventedCallback() + self._mqtt_pipeline.reauthorize_connection(callback=callback) + handle_result(callback) + logger.info("Successfully reauthorized connection to Hub") + + def send_message(self, message): + """Sends a message to the default events endpoint on the Azure IoT Hub or Azure IoT Edge Hub instance. + + This is a synchronous event, meaning that this function will not return until the event + has been sent to the service and the service has acknowledged receipt of the event. + + If the connection to the service has not previously been opened by a call to connect, this + function will open the connection before sending the event. + + :param message: The actual message to send. Anything passed that is not an instance of the + Message class will be converted to Message object. + :type message: :class:`azure.iot.device.Message` or str + + :raises: :class:`azure.iot.device.exceptions.CredentialError` if credentials are invalid + and a connection cannot be established. + :raises: :class:`azure.iot.device.exceptions.ConnectionFailedError` if a establishing a + connection results in failure. + :raises: :class:`azure.iot.device.exceptions.ConnectionDroppedError` if connection is lost + during execution. + :raises: :class:`azure.iot.device.exceptions.ClientError` if there is an unexpected failure + during execution. + :raises: ValueError if the message fails size validation. 
+ """ + if not isinstance(message, Message): + message = Message(message) + if message.get_size() > device_constant.TELEMETRY_MESSAGE_SIZE_LIMIT: + raise ValueError("Size of telemetry message can not exceed 256 KB.") + logger.info("Sending message to Hub...") + callback = EventedCallback() + self._mqtt_pipeline.send_message(message, callback=callback) + handle_result(callback) + logger.info("Successfully sent message to Hub") + + @deprecation.deprecated(deprecated_in="2.3.0", + current_version=(device_constant.VERSION), + details="We recommend that you use the .on_method_request_received property to set a handler instead") + def receive_method_request(self, method_name=None, block=True, timeout=None): + """Receive a method request via the Azure IoT Hub or Azure IoT Edge Hub. + + :param str method_name: Optionally provide the name of the method to receive requests for. + If this parameter is not given, all methods not already being specifically targeted by + a different request to receive_method will be received. + :param bool block: Indicates if the operation should block until a request is received. + :param int timeout: Optionally provide a number of seconds until blocking times out. + + :returns: MethodRequest object representing the received method request, or None if + no method request has been received by the end of the blocking period. + """ + self._check_receive_mode_is_api() + if not self._mqtt_pipeline.feature_enabled[pipeline_constant.METHODS]: + self._enable_feature(pipeline_constant.METHODS) + method_inbox = self._inbox_manager.get_method_request_inbox(method_name) + logger.info("Waiting for method request...") + try: + method_request = method_inbox.get(block=block, timeout=timeout) + except InboxEmpty: + method_request = None + + logger.info("Received method request") + return method_request + + def send_method_response(self, method_response): + """Send a response to a method request via the Azure IoT Hub or Azure IoT Edge Hub. 
+ + This is a synchronous event, meaning that this function will not return until the event + has been sent to the service and the service has acknowledged receipt of the event. + + If the connection to the service has not previously been opened by a call to connect, this + function will open the connection before sending the event. + + :param method_response: The MethodResponse to send. + :type method_response: :class:`azure.iot.device.MethodResponse` + + :raises: :class:`azure.iot.device.exceptions.CredentialError` if credentials are invalid + and a connection cannot be established. + :raises: :class:`azure.iot.device.exceptions.ConnectionFailedError` if a establishing a + connection results in failure. + :raises: :class:`azure.iot.device.exceptions.ConnectionDroppedError` if connection is lost + during execution. + :raises: :class:`azure.iot.device.exceptions.ClientError` if there is an unexpected failure + during execution. + """ + logger.info("Sending method response to Hub...") + callback = EventedCallback() + self._mqtt_pipeline.send_method_response(method_response, callback=callback) + handle_result(callback) + logger.info("Successfully sent method response to Hub") + + def get_twin(self): + """ + Gets the device or module twin from the Azure IoT Hub or Azure IoT Edge Hub service. + + This is a synchronous call, meaning that this function will not return until the twin + has been retrieved from the service. + + :returns: Complete Twin as a JSON dict + :rtype: dict + + :raises: :class:`azure.iot.device.exceptions.CredentialError` if credentials are invalid + and a connection cannot be established. + :raises: :class:`azure.iot.device.exceptions.ConnectionFailedError` if a establishing a + connection results in failure. + :raises: :class:`azure.iot.device.exceptions.ConnectionDroppedError` if connection is lost + during execution. + :raises: :class:`azure.iot.device.exceptions.ClientError` if there is an unexpected failure + during execution. 
+ """ + if not self._mqtt_pipeline.feature_enabled[pipeline_constant.TWIN]: + self._enable_feature(pipeline_constant.TWIN) + callback = EventedCallback(return_arg_name="twin") + self._mqtt_pipeline.get_twin(callback=callback) + twin = handle_result(callback) + logger.info("Successfully retrieved twin") + return twin + + def patch_twin_reported_properties(self, reported_properties_patch): + """ + Update reported properties with the Azure IoT Hub or Azure IoT Edge Hub service. + + This is a synchronous call, meaning that this function will not return until the patch + has been sent to the service and acknowledged. + + If the service returns an error on the patch operation, this function will raise the + appropriate error. + + :param reported_properties_patch: Twin Reported Properties patch as a JSON dict + :type reported_properties_patch: dict + + :raises: :class:`azure.iot.device.exceptions.CredentialError` if credentials are invalid + and a connection cannot be established. + :raises: :class:`azure.iot.device.exceptions.ConnectionFailedError` if a establishing a + connection results in failure. + :raises: :class:`azure.iot.device.exceptions.ConnectionDroppedError` if connection is lost + during execution. + :raises: :class:`azure.iot.device.exceptions.ClientError` if there is an unexpected failure + during execution. 
+ """ + if not self._mqtt_pipeline.feature_enabled[pipeline_constant.TWIN]: + self._enable_feature(pipeline_constant.TWIN) + callback = EventedCallback() + self._mqtt_pipeline.patch_twin_reported_properties(patch=reported_properties_patch, + callback=callback) + handle_result(callback) + logger.info("Successfully patched twin") + + @deprecation.deprecated(deprecated_in="2.3.0", + current_version=(device_constant.VERSION), + details="We recommend that you use the .on_twin_desired_properties_patch_received property to set a handler instead") + def receive_twin_desired_properties_patch(self, block=True, timeout=None): + """ + Receive a desired property patch via the Azure IoT Hub or Azure IoT Edge Hub. + + This is a synchronous call, which means the following: + 1. If block=True, this function will block until one of the following happens: + * a desired proprety patch is received from the Azure IoT Hub or Azure IoT Edge Hub. + * the timeout period, if provided, elapses. If a timeout happens, this function will + raise a InboxEmpty exception + 2. If block=False, this function will return any desired property patches which may have + been received by the pipeline, but not yet returned to the application. If no + desired property patches have been received by the pipeline, this function will raise + an InboxEmpty exception + + :param bool block: Indicates if the operation should block until a request is received. + :param int timeout: Optionally provide a number of seconds until blocking times out. 
+ + :returns: Twin Desired Properties patch as a JSON dict, or None if no patch has been + received by the end of the blocking period + :rtype: dict or None + """ + self._check_receive_mode_is_api() + if not self._mqtt_pipeline.feature_enabled[pipeline_constant.TWIN_PATCHES]: + self._enable_feature(pipeline_constant.TWIN_PATCHES) + twin_patch_inbox = self._inbox_manager.get_twin_patch_inbox() + logger.info("Waiting for twin patches...") + try: + patch = twin_patch_inbox.get(block=block, timeout=timeout) + except InboxEmpty: + return + else: + logger.info("twin patch received") + return patch + + def _generic_handler_setter(self, handler_name, feature_name, new_handler): + self._check_receive_mode_is_handler() + setattr(self._handler_manager, handler_name, new_handler) + if new_handler is not None: + self._mqtt_pipeline.feature_enabled[feature_name] or self._enable_feature(feature_name) + else: + if new_handler is None: + if self._mqtt_pipeline.feature_enabled[feature_name]: + self._disable_feature(feature_name) + + @property + def on_twin_desired_properties_patch_received(self): + """The handler function that will be called when a twin desired properties patch + is received. + + The function definition should take one positional argument (the twin patch in the form + of a JSON dictionary object)""" + return self._handler_manager.on_twin_desired_properties_patch_received + + @on_twin_desired_properties_patch_received.setter + def on_twin_desired_properties_patch_received(self, value): + self._generic_handler_setter("on_twin_desired_properties_patch_received", pipeline_constant.TWIN_PATCHES, value) + + @property + def on_method_request_received(self): + """The handler function that will be called when a method request is received. 
+ + The function definition should take one positional argument (the + :class:`azure.iot.device.MethodRequest` object)""" + return self._handler_manager.on_method_request_received + + @on_method_request_received.setter + def on_method_request_received(self, value): + self._generic_handler_setter("on_method_request_received", pipeline_constant.METHODS, value) + + +class IoTHubDeviceClient(GenericIoTHubClient, AbstractIoTHubDeviceClient): + __doc__ = "A synchronous device client that connects to an Azure IoT Hub instance.\n\n Intended for usage with Python 2.7 or compatibility scenarios for Python 3.5.3+.\n " + + def __init__(self, mqtt_pipeline, http_pipeline): + """Initializer for a IoTHubDeviceClient. + + This initializer should not be called directly. + Instead, use one of the 'create_from_' classmethods to instantiate + + :param mqtt_pipeline: The pipeline used to connect to the IoTHub endpoint. + :type mqtt_pipeline: :class:`azure.iot.device.iothub.pipeline.MQTTPipeline` + """ + super(IoTHubDeviceClient, self).__init__(mqtt_pipeline=mqtt_pipeline, + http_pipeline=http_pipeline) + self._mqtt_pipeline.on_c2d_message_received = CallableWeakMethod(self._inbox_manager, "route_c2d_message") + + @deprecation.deprecated(deprecated_in="2.3.0", + current_version=(device_constant.VERSION), + details="We recommend that you use the .on_message_received property to set a handler instead") + def receive_message(self, block=True, timeout=None): + """Receive a message that has been sent from the Azure IoT Hub. + + :param bool block: Indicates if the operation should block until a message is received. + :param int timeout: Optionally provide a number of seconds until blocking times out. + + :returns: Message that was sent from the Azure IoT Hub, or None if + no method request has been received by the end of the blocking period. 
+ :rtype: :class:`azure.iot.device.Message` or None + """ + self._check_receive_mode_is_api() + if not self._mqtt_pipeline.feature_enabled[pipeline_constant.C2D_MSG]: + self._enable_feature(pipeline_constant.C2D_MSG) + c2d_inbox = self._inbox_manager.get_c2d_message_inbox() + logger.info("Waiting for message from Hub...") + try: + message = c2d_inbox.get(block=block, timeout=timeout) + except InboxEmpty: + message = None + + logger.info("Message received") + return message + + def get_storage_info_for_blob(self, blob_name): + """Sends a POST request over HTTP to an IoTHub endpoint that will return information for uploading via the Azure Storage Account linked to the IoTHub your device is connected to. + + :param str blob_name: The name in string format of the blob that will be uploaded using the storage API. This name will be used to generate the proper credentials for Storage, and needs to match what will be used with the Azure Storage SDK to perform the blob upload. + + :returns: A JSON-like (dictionary) object from IoT Hub that will contain relevant information including: correlationId, hostName, containerName, blobName, sasToken. + """ + callback = EventedCallback(return_arg_name="storage_info") + self._http_pipeline.get_storage_info_for_blob(blob_name, callback=callback) + storage_info = handle_result(callback) + logger.info("Successfully retrieved storage_info") + return storage_info + + def notify_blob_upload_status(self, correlation_id, is_success, status_code, status_description): + """When the upload is complete, the device sends a POST request to the IoT Hub endpoint with information on the status of an upload to blob attempt. This is used by IoT Hub to notify listening clients. + + :param str correlation_id: Provided by IoT Hub on get_storage_info_for_blob request. + :param bool is_success: A boolean that indicates whether the file was uploaded successfully. 
+ :param int status_code: A numeric status code that is the status for the upload of the fiel to storage. + :param str status_description: A description that corresponds to the status_code. + """ + callback = EventedCallback() + self._http_pipeline.notify_blob_upload_status(correlation_id=correlation_id, + is_success=is_success, + status_code=status_code, + status_description=status_description, + callback=callback) + handle_result(callback) + logger.info("Successfully notified blob upload status") + + @property + def on_message_received(self): + """The handler function that will be called when a message is received. + + The function definition should take one positional argument (the + :class:`azure.iot.device.Message` object)""" + return self._handler_manager.on_message_received + + @on_message_received.setter + def on_message_received(self, value): + self._generic_handler_setter("on_message_received", pipeline_constant.C2D_MSG, value) + + +class IoTHubModuleClient(GenericIoTHubClient, AbstractIoTHubModuleClient): + __doc__ = "A synchronous module client that connects to an Azure IoT Hub or Azure IoT Edge instance.\n\n Intended for usage with Python 2.7 or compatibility scenarios for Python 3.5.3+.\n " + + def __init__(self, mqtt_pipeline, http_pipeline): + """Intializer for a IoTHubModuleClient. + + This initializer should not be called directly. + Instead, use one of the 'create_from_' classmethods to instantiate + + :param mqtt_pipeline: The pipeline used to connect to the IoTHub endpoint. + :type mqtt_pipeline: :class:`azure.iot.device.iothub.pipeline.MQTTPipeline` + :param http_pipeline: The pipeline used to connect to the IoTHub endpoint via HTTP. 
+ :type http_pipeline: :class:`azure.iot.device.iothub.pipeline.HTTPPipeline` + """ + super(IoTHubModuleClient, self).__init__(mqtt_pipeline=mqtt_pipeline, + http_pipeline=http_pipeline) + self._mqtt_pipeline.on_input_message_received = CallableWeakMethod(self._inbox_manager, "route_input_message") + + def send_message_to_output(self, message, output_name): + """Sends an event/message to the given module output. + + These are outgoing events and are meant to be "output events". + + This is a synchronous event, meaning that this function will not return until the event + has been sent to the service and the service has acknowledged receipt of the event. + + If the connection to the service has not previously been opened by a call to connect, this + function will open the connection before sending the event. + + :param message: Message to send to the given output. Anything passed that is not an instance of the + Message class will be converted to Message object. + :type message: :class:`azure.iot.device.Message` or str + :param str output_name: Name of the output to send the event to. + + :raises: :class:`azure.iot.device.exceptions.CredentialError` if credentials are invalid + and a connection cannot be established. + :raises: :class:`azure.iot.device.exceptions.ConnectionFailedError` if a establishing a + connection results in failure. + :raises: :class:`azure.iot.device.exceptions.ConnectionDroppedError` if connection is lost + during execution. + :raises: :class:`azure.iot.device.exceptions.ClientError` if there is an unexpected failure + during execution. + :raises: ValueError if the message fails size validation. 
+ """ + if not isinstance(message, Message): + message = Message(message) + if message.get_size() > device_constant.TELEMETRY_MESSAGE_SIZE_LIMIT: + raise ValueError("Size of message can not exceed 256 KB.") + message.output_name = output_name + logger.info("Sending message to output:" + output_name + "...") + callback = EventedCallback() + self._mqtt_pipeline.send_output_message(message, callback=callback) + handle_result(callback) + logger.info("Successfully sent message to output: " + output_name) + + @deprecation.deprecated(deprecated_in="2.3.0", + current_version=(device_constant.VERSION), + details="We recommend that you use the .on_message_received property to set a handler instead") + def receive_message_on_input(self, input_name, block=True, timeout=None): + """Receive an input message that has been sent from another Module to a specific input. + + :param str input_name: The input name to receive a message on. + :param bool block: Indicates if the operation should block until a message is received. + :param int timeout: Optionally provide a number of seconds until blocking times out. + + :returns: Message that was sent to the specified input, or None if + no method request has been received by the end of the blocking period. + """ + self._check_receive_mode_is_api() + if not self._mqtt_pipeline.feature_enabled[pipeline_constant.INPUT_MSG]: + self._enable_feature(pipeline_constant.INPUT_MSG) + input_inbox = self._inbox_manager.get_input_message_inbox(input_name) + logger.info("Waiting for input message on: " + input_name + "...") + try: + message = input_inbox.get(block=block, timeout=timeout) + except InboxEmpty: + message = None + + logger.info("Input message received on: " + input_name) + return message + + def invoke_method(self, method_params, device_id, module_id=None): + """Invoke a method from your client onto a device or module client, and receive the response to the method call. 
+ + :param dict method_params: Should contain a methodName (str), payload (str), + connectTimeoutInSeconds (int), responseTimeoutInSeconds (int). + :param str device_id: Device ID of the target device where the method will be invoked. + :param str module_id: Module ID of the target module where the method will be invoked. (Optional) + + :returns: method_result should contain a status, and a payload + :rtype: dict + """ + logger.info("Invoking {} method on {}{}".format(method_params["methodName"], device_id, module_id)) + callback = EventedCallback(return_arg_name="invoke_method_response") + self._http_pipeline.invoke_method(device_id, + method_params, callback=callback, module_id=module_id) + invoke_method_response = handle_result(callback) + logger.info("Successfully invoked method") + return invoke_method_response + + @property + def on_message_received(self): + """The handler function that will be called when an input message is received. + + The function definition should take one positional argument (the + :class:`azure.iot.device.Message` object)""" + return self._handler_manager.on_message_received + + @on_message_received.setter + def on_message_received(self, value): + self._generic_handler_setter("on_message_received", pipeline_constant.INPUT_MSG, value) diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/sync_handler_manager.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/sync_handler_manager.py new file mode 100644 index 0000000..5aeb029 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/sync_handler_manager.py @@ -0,0 +1,197 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/sync_handler_manager.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 10716 bytes +"""This module contains the manager for handler 
methods used by the callback client""" +import logging, threading, abc, six +from azure.iot.device.common import handle_exceptions +from azure.iot.device.common.chainable_exception import ChainableException +from azure.iot.device.iothub.sync_inbox import InboxEmpty +import concurrent.futures +logger = logging.getLogger(__name__) +MESSAGE = "_on_message_received" +METHOD = "_on_method_request_received" +TWIN_DP_PATCH = "_on_twin_desired_properties_patch_received" + +class HandlerManagerException(ChainableException): + __doc__ = "An exception raised by a HandlerManager\n " + + +class HandlerRunnerKillerSentinel(object): + __doc__ = "An object that functions according to the sentinel design pattern.\n Insert into an Inbox in order to indicate that the Handler Runner associated with that\n Inbox should be stopped.\n " + + +@six.add_metaclass(abc.ABCMeta) +class AbstractHandlerManager(object): + __doc__ = "Partial class that defines handler manager functionality shared between sync/async" + + def __init__(self, inbox_manager): + self._inbox_manager = inbox_manager + self._handler_runners = {MESSAGE: None, + METHOD: None, + TWIN_DP_PATCH: None} + self._on_message_received = None + self._on_method_request_received = None + self._on_twin_desired_properties_patch_received = None + + def _get_inbox_for_handler(self, handler_name): + """Retrieve the inbox relevant to the handler""" + if handler_name == METHOD: + return self._inbox_manager.get_method_request_inbox() + if handler_name == TWIN_DP_PATCH: + return self._inbox_manager.get_twin_patch_inbox() + if handler_name == MESSAGE: + return self._inbox_manager.get_unified_message_inbox() + return + + @abc.abstractmethod + def _inbox_handler_runner(self, inbox, handler_name): + """Run infinite loop that waits for an inbox to receive an object from it, then calls + the handler with that object + """ + pass + + @abc.abstractmethod + def _event_handler_runner(self, handler_name): + pass + + @abc.abstractmethod + def 
_start_handler_runner(self, handler_name): + """Create, and store a handler runner + """ + pass + + @abc.abstractmethod + def _stop_handler_runner(self, handler_name): + """Cancel and remove a handler runner""" + pass + + def _generic_handler_setter(self, handler_name, new_handler): + """Set a handler""" + curr_handler = getattr(self, handler_name) + if new_handler is not None and curr_handler is None: + logger.debug("Creating new handler runner for handler: {}".format(handler_name)) + setattr(self, handler_name, new_handler) + self._start_handler_runner(handler_name) + else: + if new_handler is None and curr_handler is not None: + logger.debug("Removing handler runner for handler: {}".format(handler_name)) + self._stop_handler_runner(handler_name) + setattr(self, handler_name, new_handler) + else: + logger.debug("Updating set handler: {}".format(handler_name)) + setattr(self, handler_name, new_handler) + + def stop(self): + """Stop the process of invoking handlers in response to events. + All pending items will be handled prior to stoppage. 
+ """ + for handler_name in self._handler_runners: + if self._handler_runners[handler_name] is not None: + self._stop_handler_runner(handler_name) + + def ensure_running(self): + """Ensure the process of invoking handlers in response to events is running""" + for handler_name in self._handler_runners: + if self._handler_runners[handler_name] is None and getattr(self, handler_name) is not None: + self._start_handler_runner(handler_name) + + @property + def on_message_received(self): + return self._on_message_received + + @on_message_received.setter + def on_message_received(self, value): + self._generic_handler_setter(MESSAGE, value) + + @property + def on_method_request_received(self): + return self._on_method_request_received + + @on_method_request_received.setter + def on_method_request_received(self, value): + self._generic_handler_setter(METHOD, value) + + @property + def on_twin_desired_properties_patch_received(self): + return self._on_twin_desired_properties_patch_received + + @on_twin_desired_properties_patch_received.setter + def on_twin_desired_properties_patch_received(self, value): + self._generic_handler_setter(TWIN_DP_PATCH, value) + + +class SyncHandlerManager(AbstractHandlerManager): + __doc__ = "Handler manager for use with synchronous clients" + + def _inbox_handler_runner(self, inbox, handler_name): + """Run infinite loop that waits for an inbox to receive an object from it, then calls + the handler with that object + """ + logger.debug("HANDLER RUNNER ({}): Starting runner".format(handler_name)) + + def _handler_callback(future): + try: + e = future.exception(timeout=0) + except Exception as raised_e: + try: + new_err = HandlerManagerException(message=("HANDLER ({}): Unable to retrieve exception data from incomplete invocation".format(handler_name)), + cause=raised_e) + handle_exceptions.handle_background_exception(new_err) + finally: + raised_e = None + del raised_e + + else: + if e: + new_err = HandlerManagerException(message=("HANDLER ({}): 
Error during invocation".format(handler_name)), + cause=e) + handle_exceptions.handle_background_exception(new_err) + else: + logger.debug("HANDLER ({}): Successfully completed invocation".format(handler_name)) + + tpe = concurrent.futures.ThreadPoolExecutor(max_workers=4) + while True: + handler_arg = inbox.get() + if isinstance(handler_arg, HandlerRunnerKillerSentinel): + logger.debug("HANDLER RUNNER ({}): HandlerRunnerKillerSentinel found in inbox. Exiting.".format(handler_name)) + tpe.shutdown() + break + handler = getattr(self, handler_name) + logger.debug("HANDLER RUNNER ({}): Invoking handler".format(handler_name)) + fut = tpe.submit(handler, handler_arg) + fut.add_done_callback(_handler_callback) + + def _event_handler_runner(self, handler_name): + logger.error(".event_handler_runner() not yet implemented") + + def _start_handler_runner(self, handler_name): + """Start and store a handler runner thread + """ + if self._handler_runners[handler_name] is not None: + raise HandlerManagerException("Cannot create thread for handler runner: {}. Runner thread already exists".format(handler_name)) + else: + inbox = self._get_inbox_for_handler(handler_name) + if inbox: + thread = threading.Thread(target=(self._inbox_handler_runner), args=[inbox, handler_name]) + else: + thread = threading.Thread(target=(self._event_handler_runner), args=[handler_name]) + thread.daemon = True + self._handler_runners[handler_name] = thread + thread.start() + + def _stop_handler_runner(self, handler_name): + """Stop and remove a handler runner task. + All pending items in the corresponding inbox will be handled by the handler before stoppage. 
+ """ + logger.debug("Adding HandlerRunnerKillerSentinel to inbox corresponding to {} handler runner".format(handler_name)) + inbox = self._get_inbox_for_handler(handler_name) + inbox._put(HandlerRunnerKillerSentinel()) + logger.debug("Waiting for {} handler runner to exit...".format(handler_name)) + thread = self._handler_runners[handler_name] + thread.join() + self._handler_runners[handler_name] = None + logger.debug("Handler runner for {} has been stopped".format(handler_name)) diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/iothub/sync_inbox.py b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/sync_inbox.py new file mode 100644 index 0000000..f141df0 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/iothub/sync_inbox.py @@ -0,0 +1,119 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/iothub/sync_inbox.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 3726 bytes +"""This module contains an Inbox class for use with a synchronous client.""" +from six.moves import queue +import six +from abc import ABCMeta, abstractmethod + +class InboxEmpty(Exception): + pass + + +@six.add_metaclass(ABCMeta) +class AbstractInbox: + __doc__ = "Abstract Base Class for Inbox.\n\n Holds generic incoming data for a client.\n\n All methods, when implemented, should be threadsafe.\n " + + @abstractmethod + def _put(self, item): + """Put an item into the Inbox. + + Implementation should block until a free slot is available. + Implementation MUST be a synchronous function. + Only to be used by the InboxManager. + + :param item: The item to put in the Inbox. + """ + pass + + @abstractmethod + def get(self): + """Remove and return an item from the inbox. + + Implementation should have the capability to block until an item is available. 
+ Implementation can be a synchronous function or an asynchronous coroutine. + + :returns: An item from the Inbox. + """ + pass + + @abstractmethod + def empty(self): + """Returns True if the inbox is empty, False otherwise + + :returns: Boolean indicating if the inbox is empty + """ + pass + + @abstractmethod + def clear(self): + """Remove all items from the inbox. + """ + pass + + +class SyncClientInbox(AbstractInbox): + __doc__ = "Holds generic incoming data for a synchronous client.\n\n All methods implemented in this class are threadsafe.\n " + + def __init__(self): + """Initializer for SyncClientInbox""" + self._queue = queue.Queue() + + def __contains__(self, item): + """Return True if item is in Inbox, False otherwise""" + with self._queue.mutex: + return item in self._queue.queue + + def _put(self, item): + """Put an item into the inbox. + + Block if necessary until a free slot is available. + Only to be used by the InboxManager. + + :param item: The item to put in the inbox. + """ + self._queue.put(item) + + def get(self, block=True, timeout=None): + """Remove and return an item from the inbox. + + :param bool block: Indicates if the operation should block until an item is available. + Default True. + :param int timeout: Optionally provide a number of seconds until blocking times out. + + :raises: InboxEmpty if timeout occurs because the inbox is empty + :raises: InboxEmpty if inbox is empty in non-blocking mode + + :returns: An item from the Inbox + """ + try: + return self._queue.get(block=block, timeout=timeout) + except queue.Empty: + raise InboxEmpty("Inbox is empty") + + def empty(self): + """Returns True if the inbox is empty, False otherwise. + + Note that there is a race condition here, and this may not be accurate as the queue size + may change while this operation is occurring. 
+ + :returns: Boolean indicating if the inbox is empty + """ + return self._queue.empty() + + def join(self): + """Block until all items in the inbox have been gotten and processed. + + Only really used for test code. + """ + return self._queue.join() + + def clear(self): + """Remove all items from the inbox. + """ + with self._queue.mutex: + self._queue.queue.clear() diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/patch.py b/APPS_UNCOMPILED/lib/azure/iot/device/patch.py new file mode 100644 index 0000000..e32d701 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/patch.py @@ -0,0 +1,88 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/patch.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 7347 bytes +"""This module provides patches used to dynamically modify items from the libraries""" +import sys, inspect, logging +logger = logging.getLogger(__name__) +shim_scope = {} + +def add_shims_for_inherited_methods(target_class): + """Dynamically add overriding, pass-through shim methods for all public inherited methods + on a child class, which simply call into the parent class implementation of the same method. + + These shim methods will include the same docstrings as the method from the parent class. + + This currently only works for Python 3.5+ + + Using DEBUG logging will allow you to see output of all dynamic operations that occur within + for debugging purposes. 
+ + :param target_class: The child class to add shim methods to + """ + class_functions = inspect.getmembers(target_class, predicate=(inspect.isfunction)) + class_methods = inspect.getmembers(target_class, predicate=(inspect.ismethod)) + all_methods = class_functions + class_methods + class_attributes = inspect.classify_class_attrs(target_class) + classname_alias = target_class.__name__ + while classname_alias in shim_scope: + classname_alias += "_" + + class_module = inspect.getmodule(target_class) + import_cmdstr = "from {module} import {target_class} as {alias}".format(module=(class_module.__name__), + target_class=(target_class.__name__), + alias=classname_alias) + logger.debug("exec: " + import_cmdstr) + for method in all_methods: + method_name = method[0] + method_obj = method[1] + method_attribute = [att for att in class_attributes if att.name == method_name][0] + originating_class_obj = method_attribute.defining_class + if method_name[0] != "_": + if originating_class_obj != target_class: + method_sig = inspect.signature(method_obj) + sig_params = method_sig.parameters + if inspect.ismethod(method_obj): + complete_params = [] + complete_params.append(inspect.Parameter("cls", inspect.Parameter.POSITIONAL_OR_KEYWORD)) + complete_params += list(sig_params.values()) + method_sig = method_sig.replace(parameters=complete_params) + else: + invoke_params_list = [] + for param in sig_params.values(): + if param.name != "self" and param.name != "cls": + new_param = param.replace(default=(inspect.Parameter.empty)) + invoke_params_list.append(new_param) + + invoke_params = method_sig.replace(parameters=invoke_params_list) + if inspect.ismethod(method_obj): + obj_or_type = "cls" + else: + obj_or_type = "self" + if inspect.iscoroutine(method_obj) or inspect.iscoroutinefunction(method_obj): + def_syntax = "async def" + ret_syntax = "return await" + else: + def_syntax = "def" + ret_syntax = "return" + fn_def_cmdstr = "{def_syntax} {method_name}{signature}: {ret_syntax} 
super({leaf_class}, {object_or_type}).{method_name}{invocation}".format(def_syntax=def_syntax, + method_name=method_name, + signature=(str(method_sig)), + ret_syntax=ret_syntax, + leaf_class=classname_alias, + object_or_type=obj_or_type, + invocation=(str(invoke_params))) + logger.debug("exec: " + fn_def_cmdstr) + set_doc_cmdstr = "{method_name}.__doc__ = {leaf_class}.{method_name}.__doc__".format(method_name=method_name, + leaf_class=classname_alias) + logger.debug("exec: " + set_doc_cmdstr) + if inspect.ismethod(method_obj): + attach_shim_cmdstr = "setattr({leaf_class}, '{method_name}', classmethod({method_name}))".format(leaf_class=classname_alias, + method_name=method_name) + else: + attach_shim_cmdstr = "setattr({leaf_class}, '{method_name}', {method_name})".format(leaf_class=classname_alias, + method_name=method_name) + logger.debug("exec: " + attach_shim_cmdstr) diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/patch_documentation.py b/APPS_UNCOMPILED/lib/azure/iot/device/patch_documentation.py new file mode 100644 index 0000000..7f1b9c3 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/patch_documentation.py @@ -0,0 +1,189 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/patch_documentation.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 10406 bytes +"""This module provides hard coded patches used to modify items from the libraries. 
+Currently we have to do like this so that we don't use exec anywhere""" + +def execute_patch_for_sync(): + from azure.iot.device.iothub.sync_clients import IoTHubDeviceClient + + def connect(self): + return super(IoTHubDeviceClient, self).connect() + + connect.__doc__ = IoTHubDeviceClient.connect.__doc__ + setattr(IoTHubDeviceClient, "connect", connect) + + def disconnect(self): + return super(IoTHubDeviceClient, self).disconnect() + + disconnect.__doc__ = IoTHubDeviceClient.disconnect.__doc__ + setattr(IoTHubDeviceClient, "disconnect", disconnect) + + def get_twin(self): + return super(IoTHubDeviceClient, self).get_twin() + + get_twin.__doc__ = IoTHubDeviceClient.get_twin.__doc__ + setattr(IoTHubDeviceClient, "get_twin", get_twin) + + def patch_twin_reported_properties(self, reported_properties_patch): + return super(IoTHubDeviceClient, self).patch_twin_reported_properties(reported_properties_patch) + + patch_twin_reported_properties.__doc__ = IoTHubDeviceClient.patch_twin_reported_properties.__doc__ + setattr(IoTHubDeviceClient, "patch_twin_reported_properties", patch_twin_reported_properties) + + def receive_method_request(self, method_name=None, block=True, timeout=None): + return super(IoTHubDeviceClient, self).receive_method_request(method_name, block, timeout) + + receive_method_request.__doc__ = IoTHubDeviceClient.receive_method_request.__doc__ + setattr(IoTHubDeviceClient, "receive_method_request", receive_method_request) + + def receive_twin_desired_properties_patch(self, block=True, timeout=None): + return super(IoTHubDeviceClient, self).receive_twin_desired_properties_patch(block, timeout) + + receive_twin_desired_properties_patch.__doc__ = IoTHubDeviceClient.receive_twin_desired_properties_patch.__doc__ + setattr(IoTHubDeviceClient, "receive_twin_desired_properties_patch", receive_twin_desired_properties_patch) + + def send_message(self, message): + return super(IoTHubDeviceClient, self).send_message(message) + + send_message.__doc__ = 
IoTHubDeviceClient.send_message.__doc__ + setattr(IoTHubDeviceClient, "send_message", send_message) + + def send_method_response(self, method_response): + return super(IoTHubDeviceClient, self).send_method_response(method_response) + + send_method_response.__doc__ = IoTHubDeviceClient.send_method_response.__doc__ + setattr(IoTHubDeviceClient, "send_method_response", send_method_response) + + def update_sastoken(self, sastoken): + return super(IoTHubDeviceClient, self).update_sastoken(sastoken) + + update_sastoken.__doc__ = IoTHubDeviceClient.update_sastoken.__doc__ + setattr(IoTHubDeviceClient, "update_sastoken", update_sastoken) + + def create_from_connection_string(cls, connection_string, **kwargs): + return (super(IoTHubDeviceClient, cls).create_from_connection_string)( + connection_string, **kwargs) + + create_from_connection_string.__doc__ = IoTHubDeviceClient.create_from_connection_string.__doc__ + setattr(IoTHubDeviceClient, "create_from_connection_string", classmethod(create_from_connection_string)) + + def create_from_sastoken(cls, sastoken, **kwargs): + return (super(IoTHubDeviceClient, cls).create_from_sastoken)(sastoken, **kwargs) + + create_from_sastoken.__doc__ = IoTHubDeviceClient.create_from_sastoken.__doc__ + setattr(IoTHubDeviceClient, "create_from_sastoken", classmethod(create_from_sastoken)) + + def create_from_symmetric_key(cls, symmetric_key, hostname, device_id, **kwargs): + return (super(IoTHubDeviceClient, cls).create_from_symmetric_key)( + symmetric_key, hostname, device_id, **kwargs) + + create_from_symmetric_key.__doc__ = IoTHubDeviceClient.create_from_symmetric_key.__doc__ + setattr(IoTHubDeviceClient, "create_from_symmetric_key", classmethod(create_from_symmetric_key)) + + def create_from_x509_certificate(cls, x509, hostname, device_id, **kwargs): + return (super(IoTHubDeviceClient, cls).create_from_x509_certificate)( + x509, hostname, device_id, **kwargs) + + create_from_x509_certificate.__doc__ = 
IoTHubDeviceClient.create_from_x509_certificate.__doc__ + setattr(IoTHubDeviceClient, "create_from_x509_certificate", classmethod(create_from_x509_certificate)) + from azure.iot.device.iothub.sync_clients import IoTHubModuleClient + + def connect(self): + return super(IoTHubModuleClient, self).connect() + + connect.__doc__ = IoTHubModuleClient.connect.__doc__ + setattr(IoTHubModuleClient, "connect", connect) + + def disconnect(self): + return super(IoTHubModuleClient, self).disconnect() + + disconnect.__doc__ = IoTHubModuleClient.disconnect.__doc__ + setattr(IoTHubModuleClient, "disconnect", disconnect) + + def get_twin(self): + return super(IoTHubModuleClient, self).get_twin() + + get_twin.__doc__ = IoTHubModuleClient.get_twin.__doc__ + setattr(IoTHubModuleClient, "get_twin", get_twin) + + def patch_twin_reported_properties(self, reported_properties_patch): + return super(IoTHubModuleClient, self).patch_twin_reported_properties(reported_properties_patch) + + patch_twin_reported_properties.__doc__ = IoTHubModuleClient.patch_twin_reported_properties.__doc__ + setattr(IoTHubModuleClient, "patch_twin_reported_properties", patch_twin_reported_properties) + + def receive_method_request(self, method_name=None, block=True, timeout=None): + return super(IoTHubModuleClient, self).receive_method_request(method_name, block, timeout) + + receive_method_request.__doc__ = IoTHubModuleClient.receive_method_request.__doc__ + setattr(IoTHubModuleClient, "receive_method_request", receive_method_request) + + def receive_twin_desired_properties_patch(self, block=True, timeout=None): + return super(IoTHubModuleClient, self).receive_twin_desired_properties_patch(block, timeout) + + receive_twin_desired_properties_patch.__doc__ = IoTHubModuleClient.receive_twin_desired_properties_patch.__doc__ + setattr(IoTHubModuleClient, "receive_twin_desired_properties_patch", receive_twin_desired_properties_patch) + + def send_message(self, message): + return super(IoTHubModuleClient, 
self).send_message(message) + + send_message.__doc__ = IoTHubModuleClient.send_message.__doc__ + setattr(IoTHubModuleClient, "send_message", send_message) + + def send_method_response(self, method_response): + return super(IoTHubModuleClient, self).send_method_response(method_response) + + send_method_response.__doc__ = IoTHubModuleClient.send_method_response.__doc__ + setattr(IoTHubModuleClient, "send_method_response", send_method_response) + + def update_sastoken(self, sastoken): + return super(IoTHubModuleClient, self).update_sastoken(sastoken) + + update_sastoken.__doc__ = IoTHubModuleClient.update_sastoken.__doc__ + setattr(IoTHubModuleClient, "update_sastoken", update_sastoken) + + def create_from_connection_string(cls, connection_string, **kwargs): + return (super(IoTHubModuleClient, cls).create_from_connection_string)( + connection_string, **kwargs) + + create_from_connection_string.__doc__ = IoTHubModuleClient.create_from_connection_string.__doc__ + setattr(IoTHubModuleClient, "create_from_connection_string", classmethod(create_from_connection_string)) + + def create_from_edge_environment(cls, **kwargs): + return (super(IoTHubModuleClient, cls).create_from_edge_environment)(**kwargs) + + create_from_edge_environment.__doc__ = IoTHubModuleClient.create_from_edge_environment.__doc__ + setattr(IoTHubModuleClient, "create_from_edge_environment", classmethod(create_from_edge_environment)) + + def create_from_sastoken(cls, sastoken, **kwargs): + return (super(IoTHubModuleClient, cls).create_from_sastoken)(sastoken, **kwargs) + + create_from_sastoken.__doc__ = IoTHubModuleClient.create_from_sastoken.__doc__ + setattr(IoTHubModuleClient, "create_from_sastoken", classmethod(create_from_sastoken)) + + def create_from_x509_certificate(cls, x509, hostname, device_id, module_id, **kwargs): + return (super(IoTHubModuleClient, cls).create_from_x509_certificate)( + x509, hostname, device_id, module_id, **kwargs) + + create_from_x509_certificate.__doc__ = 
IoTHubModuleClient.create_from_x509_certificate.__doc__ + setattr(IoTHubModuleClient, "create_from_x509_certificate", classmethod(create_from_x509_certificate)) + from azure.iot.device.provisioning.provisioning_device_client import ProvisioningDeviceClient + + def create_from_symmetric_key(cls, provisioning_host, registration_id, id_scope, symmetric_key, **kwargs): + return (super(ProvisioningDeviceClient, cls).create_from_symmetric_key)( + provisioning_host, registration_id, id_scope, symmetric_key, **kwargs) + + create_from_symmetric_key.__doc__ = ProvisioningDeviceClient.create_from_symmetric_key.__doc__ + setattr(ProvisioningDeviceClient, "create_from_symmetric_key", classmethod(create_from_symmetric_key)) + + def create_from_x509_certificate(cls, provisioning_host, registration_id, id_scope, x509, **kwargs): + return (super(ProvisioningDeviceClient, cls).create_from_x509_certificate)( + provisioning_host, registration_id, id_scope, x509, **kwargs) + + create_from_x509_certificate.__doc__ = ProvisioningDeviceClient.create_from_x509_certificate.__doc__ + setattr(ProvisioningDeviceClient, "create_from_x509_certificate", classmethod(create_from_x509_certificate)) diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/__init__.py b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/__init__.py new file mode 100644 index 0000000..81e1bbb --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/__init__.py @@ -0,0 +1,17 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/provisioning/__init__.py +# Compiled at: 2024-04-18 03:12:59 +# Size of source mod 2**32: 440 bytes +"""Azure Provisioning Device Library + +This library provides functionality that enables zero-touch, just-in-time provisioning to the right IoT hub without requiring +human 
intervention, enabling customers to provision millions of devices in a secure and scalable manner. + +""" +from .provisioning_device_client import ProvisioningDeviceClient +from .models import RegistrationResult +__all__ = [ + "ProvisioningDeviceClient", "RegistrationResult"] diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/abstract_provisioning_device_client.py b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/abstract_provisioning_device_client.py new file mode 100644 index 0000000..37bc1e5 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/abstract_provisioning_device_client.py @@ -0,0 +1,197 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/provisioning/abstract_provisioning_device_client.py +# Compiled at: 2024-04-18 03:12:59 +# Size of source mod 2**32: 10465 bytes +""" +This module provides an abstract interface representing clients which can communicate with the +Device Provisioning Service. +""" +import abc, six, logging +from azure.iot.device.provisioning import pipeline +from azure.iot.device.common.auth import sastoken as st +from azure.iot.device.common import auth +logger = logging.getLogger(__name__) + +def _validate_kwargs(exclude=[], **kwargs): + """Helper function to validate user provided kwargs. 
+ Raises TypeError if an invalid option has been provided""" + valid_kwargs = [ + 'websockets', 'cipher', 'proxy_options', 'sastoken_ttl', 'keep_alive'] + for kwarg in kwargs: + if kwarg not in valid_kwargs or kwarg in exclude: + raise TypeError("Unsupported keyword argument '{}'".format(kwarg)) + + +def validate_registration_id(reg_id): + if not (reg_id and reg_id.strip()): + raise ValueError("Registration Id can not be none, empty or blank.") + + +def _get_config_kwargs(**kwargs): + """Get the subset of kwargs which pertain the config object""" + valid_config_kwargs = [ + "websockets", "cipher", "proxy_options", "keep_alive"] + config_kwargs = {} + for kwarg in kwargs: + if kwarg in valid_config_kwargs: + config_kwargs[kwarg] = kwargs[kwarg] + + return config_kwargs + + +def _form_sas_uri(id_scope, registration_id): + return "{id_scope}/registrations/{registration_id}".format(id_scope=id_scope, + registration_id=registration_id) + + +@six.add_metaclass(abc.ABCMeta) +class AbstractProvisioningDeviceClient(object): + __doc__ = "\n Super class for any client that can be used to register devices to Device Provisioning Service.\n " + + def __init__(self, pipeline): + """ + Initializes the provisioning client. + + NOTE: This initializer should not be called directly. + Instead, the class methods that start with `create_from_` should be used to create a + client object. + + :param pipeline: Instance of the provisioning pipeline object. + :type pipeline: :class:`azure.iot.device.provisioning.pipeline.MQTTPipeline` + """ + self._pipeline = pipeline + self._provisioning_payload = None + + @classmethod + def create_from_symmetric_key(cls, provisioning_host, registration_id, id_scope, symmetric_key, **kwargs): + """ + Create a client which can be used to run the registration of a device with provisioning service + using Symmetric Key authentication. + + :param str provisioning_host: Host running the Device Provisioning Service. 
+ Can be found in the Azure portal in the Overview tab as the string Global device endpoint. + :param str registration_id: The registration ID used to uniquely identify a device in the + Device Provisioning Service. The registration ID is alphanumeric, lowercase string + and may contain hyphens. + :param str id_scope: The ID scope used to uniquely identify the specific provisioning + service the device will register through. The ID scope is assigned to a + Device Provisioning Service when it is created by the user and is generated by the + service and is immutable, guaranteeing uniqueness. + :param str symmetric_key: The key which will be used to create the shared access signature + token to authenticate the device with the Device Provisioning Service. By default, + the Device Provisioning Service creates new symmetric keys with a default length of + 32 bytes when new enrollments are saved with the Auto-generate keys option enabled. + Users can provide their own symmetric keys for enrollments by disabling this option + within 16 bytes and 64 bytes and in valid Base64 format. + + :param bool websockets: Configuration Option. Default is False. Set to true if using MQTT + over websockets. + :param cipher: Configuration Option. Cipher suite(s) for TLS/SSL, as a string in + "OpenSSL cipher list format" or as a list of cipher suite strings. + :type cipher: str or list(str) + :param proxy_options: Options for sending traffic through proxy servers. + :type proxy_options: :class:`azure.iot.device.ProxyOptions` + :param int keepalive: Maximum period in seconds between communications with the + broker. If no other messages are being exchanged, this controls the + rate at which the client will send ping messages to the broker. + If not provided default value of 60 secs will be used. + :raises: TypeError if given an unrecognized parameter. + + :returns: A ProvisioningDeviceClient instance which can register via Symmetric Key. 
+ """ + validate_registration_id(registration_id) + _validate_kwargs(**kwargs) + uri = _form_sas_uri(id_scope=id_scope, registration_id=registration_id) + signing_mechanism = auth.SymmetricKeySigningMechanism(key=symmetric_key) + token_ttl = kwargs.get("sastoken_ttl", 3600) + try: + sastoken = st.RenewableSasToken(uri, signing_mechanism, ttl=token_ttl) + except st.SasTokenError as e: + try: + new_err = ValueError("Could not create a SasToken using the provided values") + new_err.__cause__ = e + raise new_err + finally: + e = None + del e + + config_kwargs = _get_config_kwargs(**kwargs) + pipeline_configuration = (pipeline.ProvisioningPipelineConfig)(**, **config_kwargs) + mqtt_provisioning_pipeline = pipeline.MQTTPipeline(pipeline_configuration) + return cls(mqtt_provisioning_pipeline) + + @classmethod + def create_from_x509_certificate(cls, provisioning_host, registration_id, id_scope, x509, **kwargs): + """ + Create a client which can be used to run the registration of a device with + provisioning service using X509 certificate authentication. + + :param str provisioning_host: Host running the Device Provisioning Service. Can be found in + the Azure portal in the Overview tab as the string Global device endpoint. + :param str registration_id: The registration ID used to uniquely identify a device in the + Device Provisioning Service. The registration ID is alphanumeric, lowercase string + and may contain hyphens. + :param str id_scope: The ID scope is used to uniquely identify the specific + provisioning service the device will register through. The ID scope is assigned to a + Device Provisioning Service when it is created by the user and is generated by the + service and is immutable, guaranteeing uniqueness. + :param x509: The x509 certificate, To use the certificate the enrollment object needs to + contain cert (either the root certificate or one of the intermediate CA certificates). + If the cert comes from a CER file, it needs to be base64 encoded. 
+ :type x509: :class:`azure.iot.device.X509` + + :param bool websockets: Configuration Option. Default is False. Set to true if using MQTT + over websockets. + :param cipher: Configuration Option. Cipher suite(s) for TLS/SSL, as a string in + "OpenSSL cipher list format" or as a list of cipher suite strings. + :type cipher: str or list(str) + :param proxy_options: Options for sending traffic through proxy servers. + :type proxy_options: :class:`azure.iot.device.ProxyOptions` + :param int keepalive: Maximum period in seconds between communications with the + broker. If no other messages are being exchanged, this controls the + rate at which the client will send ping messages to the broker. + If not provided default value of 60 secs will be used. + :raises: TypeError if given an unrecognized parameter. + + :returns: A ProvisioningDeviceClient which can register via X509 client certificates. + """ + validate_registration_id(registration_id) + excluded_kwargs = [ + "sastoken_ttl"] + _validate_kwargs(exclude=excluded_kwargs, **kwargs) + config_kwargs = _get_config_kwargs(**kwargs) + pipeline_configuration = pipeline.ProvisioningPipelineConfig(hostname=provisioning_host, registration_id=registration_id, id_scope=id_scope, x509=x509, **config_kwargs) + mqtt_provisioning_pipeline = pipeline.MQTTPipeline(pipeline_configuration) + return cls(mqtt_provisioning_pipeline) + + @abc.abstractmethod + def register(self): + """ + Register the device with the Device Provisioning Service. + """ + pass + + @property + def provisioning_payload(self): + return self._provisioning_payload + + @provisioning_payload.setter + def provisioning_payload(self, provisioning_payload): + """ + Set the payload that will form the request payload in a registration request. + + :param provisioning_payload: The payload that can be supplied by the user. + :type provisioning_payload: This can be an object or dictionary or a string or an integer.
+ """ + self._provisioning_payload = provisioning_payload + + +def log_on_register_complete(result=None): + if result is not None: + if result.status == "assigned": + logger.info("Successfully registered with Provisioning Service") + else: + logger.info("Failed registering with Provisioning Service") diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/aio/__init__.py b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/aio/__init__.py new file mode 100644 index 0000000..5d39900 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/aio/__init__.py @@ -0,0 +1,15 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/provisioning/aio/__init__.py +# Compiled at: 2024-04-18 03:12:59 +# Size of source mod 2**32: 282 bytes +"""Azure IoT Provisioning Service SDK - Asynchronous + +This SDK provides asynchronous functionality for communicating with the Azure Provisioning Service +as a Device. 
+""" +from .async_provisioning_device_client import ProvisioningDeviceClient +__all__ = [ + "ProvisioningDeviceClient"] diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/aio/async_provisioning_device_client.py b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/aio/async_provisioning_device_client.py new file mode 100644 index 0000000..1c5d8c5 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/aio/async_provisioning_device_client.py @@ -0,0 +1,103 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/provisioning/aio/async_provisioning_device_client.py +# Compiled at: 2024-04-18 03:12:59 +# Size of source mod 2**32: 4263 bytes +""" +This module contains user-facing asynchronous Provisioning Device Client for Azure Provisioning +Device SDK. This client uses Symmetric Key and X509 authentication to register devices with an +IoT Hub via the Device Provisioning Service. 
+""" +import logging +from azure.iot.device.common import async_adapter +from azure.iot.device.provisioning.abstract_provisioning_device_client import AbstractProvisioningDeviceClient +from azure.iot.device.provisioning.abstract_provisioning_device_client import log_on_register_complete +from azure.iot.device.provisioning.pipeline import exceptions as pipeline_exceptions +from azure.iot.device import exceptions +from azure.iot.device.provisioning.pipeline import constant as dps_constant +logger = logging.getLogger(__name__) + +async def handle_result(callback): + try: + return await callback.completion() + except pipeline_exceptions.ConnectionDroppedError as e: + try: + raise exceptions.ConnectionDroppedError(message="Lost connection to IoTHub", cause=e) + finally: + e = None + del e + + except pipeline_exceptions.ConnectionFailedError as e: + try: + raise exceptions.ConnectionFailedError(message="Could not connect to IoTHub", cause=e) + finally: + e = None + del e + + except pipeline_exceptions.UnauthorizedError as e: + try: + raise exceptions.CredentialError(message="Credentials invalid, could not connect", cause=e) + finally: + e = None + del e + + except pipeline_exceptions.ProtocolClientError as e: + try: + raise exceptions.ClientError(message="Error in the IoTHub client", cause=e) + finally: + e = None + del e + + except Exception as e: + try: + raise exceptions.ClientError(message="Unexpected failure", cause=e) + finally: + e = None + del e + + +class ProvisioningDeviceClient(AbstractProvisioningDeviceClient): + __doc__ = "\n Client which can be used to run the registration of a device with provisioning service\n using Symmetric Key or X509 authentication.\n " + + async def register(self): + """ + Register the device with the provisioning service. + + Before returning the client will also disconnect from the provisioning service. + If a registration attempt is made while a previous registration is in progress it may + throw an error. 
+ + :returns: RegistrationResult indicating the result of the registration. + :rtype: :class:`azure.iot.device.RegistrationResult` + + :raises: :class:`azure.iot.device.exceptions.CredentialError` if credentials are invalid + and a connection cannot be established. + :raises: :class:`azure.iot.device.exceptions.ConnectionFailedError` if a establishing a + connection results in failure. + :raises: :class:`azure.iot.device.exceptions.ConnectionDroppedError` if connection is lost + during execution. + :raises: :class:`azure.iot.device.exceptions.ClientError` if there is an unexpected failure + during execution. + + """ + logger.info("Registering with Provisioning Service...") + if not self._pipeline.responses_enabled[dps_constant.REGISTER]: + await self._enable_responses() + register_async = async_adapter.emulate_async(self._pipeline.register) + register_complete = async_adapter.AwaitableCallback(return_arg_name="result") + await register_async(payload=(self._provisioning_payload), callback=register_complete) + result = await handle_result(register_complete) + log_on_register_complete(result) + return result + + async def _enable_responses(self): + """Enable to receive responses from Device Provisioning Service. 
+ """ + logger.info("Enabling reception of response from Device Provisioning Service...") + subscribe_async = async_adapter.emulate_async(self._pipeline.enable_responses) + subscription_complete = async_adapter.AwaitableCallback() + await subscribe_async(callback=subscription_complete) + await handle_result(subscription_complete) + logger.info("Successfully subscribed to Device Provisioning Service to receive responses") diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/models/__init__.py b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/models/__init__.py new file mode 100644 index 0000000..c1f0f2b --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/models/__init__.py @@ -0,0 +1,12 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/provisioning/models/__init__.py +# Compiled at: 2024-04-18 03:12:59 +# Size of source mod 2**32: 180 bytes +"""Azure Provisioning Device Models + +This package provides object models for use within the Azure Provisioning Device SDK. 
+""" +from .registration_result import RegistrationResult diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/models/registration_result.py b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/models/registration_result.py new file mode 100644 index 0000000..57b2bf4 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/models/registration_result.py @@ -0,0 +1,93 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/provisioning/models/registration_result.py +# Compiled at: 2024-04-18 03:12:59 +# Size of source mod 2**32: 4292 bytes +import json + +class RegistrationResult(object): + __doc__ = '\n The final result of a completed or failed registration attempt\n :ivar:request_id: The request id to which the response is being obtained\n :ivar:operation_id: The id of the operation as returned by the registration request.\n :ivar status: The status of the registration process as returned by the provisioning service.\n Values can be "unassigned", "assigning", "assigned", "failed", "disabled"\n :ivar registration_state : Details like device id, assigned hub , date times etc returned\n from the provisioning service.\n ' + + def __init__(self, operation_id, status, registration_state=None): + """ + :param operation_id: The id of the operation as returned by the initial registration request. + :param status: The status of the registration process. + Values can be "unassigned", "assigning", "assigned", "failed", "disabled" + :param registration_state : Details like device id, assigned hub , date times etc returned + from the provisioning service. 
+ """ + self._operation_id = operation_id + self._status = status + self._registration_state = registration_state + + @property + def operation_id(self): + return self._operation_id + + @property + def status(self): + return self._status + + @property + def registration_state(self): + return self._registration_state + + def __str__(self): + return "\n".join([str(self.registration_state), self.status]) + + +class RegistrationState(object): + __doc__ = '\n The registration state regarding the device.\n :ivar device_id: Desired device id for the provisioned device\n :ivar assigned_hub: Desired IoT Hub to which the device is linked.\n :ivar sub_status: Substatus for \'Assigned\' devices. Possible values are\n "initialAssignment", "deviceDataMigrated", "deviceDataReset"\n :ivar created_date_time: Registration create date time (in UTC).\n :ivar last_update_date_time: Last updated date time (in UTC).\n :ivar etag: The entity tag associated with the resource.\n ' + + def __init__(self, device_id=None, assigned_hub=None, sub_status=None, created_date_time=None, last_update_date_time=None, etag=None, payload=None): + """ + :param device_id: Desired device id for the provisioned device + :param assigned_hub: Desired IoT Hub to which the device is linked. + :param sub_status: Substatus for 'Assigned' devices. Possible values are + "initialAssignment", "deviceDataMigrated", "deviceDataReset" + :param created_date_time: Registration create date time (in UTC). + :param last_update_date_time: Last updated date time (in UTC). + :param etag: The entity tag associated with the resource. 
+ :param payload: The payload with which hub is responding + """ + self._device_id = device_id + self._assigned_hub = assigned_hub + self._sub_status = sub_status + self._created_date_time = created_date_time + self._last_update_date_time = last_update_date_time + self._etag = etag + self._response_payload = payload + + @property + def device_id(self): + return self._device_id + + @property + def assigned_hub(self): + return self._assigned_hub + + @property + def sub_status(self): + return self._sub_status + + @property + def created_date_time(self): + return self._created_date_time + + @property + def last_update_date_time(self): + return self._last_update_date_time + + @property + def etag(self): + return self._etag + + @property + def response_payload(self): + return json.dumps((self._response_payload), default=(lambda o: o.__dict__), sort_keys=True) + + def __str__(self): + return "\n".join([ + self.device_id, self.assigned_hub, self.sub_status, self.response_payload]) diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/__init__.py b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/__init__.py new file mode 100644 index 0000000..fa227ae --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/__init__.py @@ -0,0 +1,15 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/provisioning/pipeline/__init__.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 244 bytes +"""Azure Provisioning Device Communication Pipeline + +This package provides pipeline for use with the Azure Provisioning Device SDK. 
class ProvisioningPipelineConfig(BasePipelineConfig):
    """A class for storing all configurations/options for Provisioning clients in the
    Azure IoT Python Device Client Library.
    """

    def __init__(self, hostname, registration_id, id_scope, **kwargs):
        """Initializer for ProvisioningPipelineConfig.

        All unrecognized keyword arguments are passed down to BasePipelineConfig to be
        evaluated. This stacked options setting allows unique configuration options to
        exist per client while keeping shared options in the base configuration class.

        :param str hostname: The hostname of the Provisioning hub instance to connect to
        :param str registration_id: The device registration identity being provisioned
        :param str id_scope: The identity of the provisioning service being used
        """
        super(ProvisioningPipelineConfig, self).__init__(hostname=hostname, **kwargs)
        self.id_scope = id_scope
        self.registration_id = registration_id
+""" +REGISTER = "register" +QUERY = "query" +DEFAULT_POLLING_INTERVAL = 2 +DEFAULT_TIMEOUT_INTERVAL = 30 +SUBSCRIBE_TOPIC_PROVISIONING = "$dps/registrations/res/#" +PUBLISH_TOPIC_REGISTRATION = "$dps/registrations/PUT/iotdps-register/?$rid={}" +PUBLISH_TOPIC_QUERYING = "$dps/registrations/GET/iotdps-get-operationstatus/?$rid={}&operationId={}" diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/exceptions.py b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/exceptions.py new file mode 100644 index 0000000..67fdf55 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/exceptions.py @@ -0,0 +1,10 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/provisioning/pipeline/exceptions.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 1162 bytes +"""This module defines an exception surface, exposed as part of the pipeline API""" +from azure.iot.device.common.pipeline.pipeline_exceptions import * +from azure.iot.device.common.transport_exceptions import ConnectionFailedError, ConnectionDroppedError, UnauthorizedError, ProtocolClientError diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/mqtt_pipeline.py b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/mqtt_pipeline.py new file mode 100644 index 0000000..dad9e5b --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/mqtt_pipeline.py @@ -0,0 +1,128 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/provisioning/pipeline/mqtt_pipeline.py +# Compiled at: 2024-04-18 03:12:58 +# Size of 
class MQTTPipeline(object):
    """Pipeline exposing connect/disconnect/register operations for the Device
    Provisioning Service over MQTT.
    """

    def __init__(self, pipeline_configuration):
        """
        Constructor for instantiating a pipeline.

        :param pipeline_configuration: Configuration object carrying credentials and
            options for the pipeline (includes the registration id).
        """
        # Tracks which response subscriptions have been established so far.
        self.responses_enabled = {provisioning_constants.REGISTER: False}
        # Handlers the client layer may attach to; None until assigned.
        self.on_connected = None
        self.on_disconnected = None
        self.on_message_received = None
        self._registration_id = pipeline_configuration.registration_id

        # Build the stage chain.  Order matters: operations flow top-down,
        # events flow bottom-up.
        pipeline = pipeline_stages_base.PipelineRootStage(pipeline_configuration=pipeline_configuration)
        for stage in (
            pipeline_stages_base.SasTokenRenewalStage(),
            pipeline_stages_provisioning.RegistrationStage(),
            pipeline_stages_provisioning.PollingStatusStage(),
            pipeline_stages_base.CoordinateRequestAndResponseStage(),
            pipeline_stages_provisioning_mqtt.ProvisioningMQTTTranslationStage(),
            pipeline_stages_base.AutoConnectStage(),
            pipeline_stages_base.ReconnectStage(),
            pipeline_stages_base.ConnectionLockStage(),
            pipeline_stages_base.RetryStage(),
            pipeline_stages_base.OpTimeoutStage(),
            pipeline_stages_mqtt.MQTTTransportStage(),
        ):
            pipeline = pipeline.append_stage(stage)
        self._pipeline = pipeline

        def _on_pipeline_event(event):
            # Provisioning expects no unsolicited events; anything arriving here
            # is unexpected and dropped loudly.
            logger.error("Dropping unknown pipeline event {}".format(event.name))

        def _on_connected():
            if self.on_connected:
                self.on_connected("connected")

        def _on_disconnected():
            if self.on_disconnected:
                self.on_disconnected("disconnected")

        self._pipeline.on_pipeline_event_handler = _on_pipeline_event
        self._pipeline.on_connected_handler = _on_connected
        self._pipeline.on_disconnected_handler = _on_disconnected

        # Run the initialization op synchronously; any failure surfaces here.
        callback = EventedCallback()
        self._pipeline.run_op(pipeline_ops_base.InitializePipelineOperation(callback=callback))
        callback.wait_for_completion()

    def connect(self, callback=None):
        """
        Connect to the service.

        :param callback: callback which is called when the connection to the service
            is complete.

        The following exceptions are not "raised", but rather returned via the "error"
        parameter when invoking "callback":

        :raises: :class:`azure.iot.device.provisioning.pipeline.exceptions.ConnectionFailedError`
        :raises: :class:`azure.iot.device.provisioning.pipeline.exceptions.ConnectionDroppedError`
        :raises: :class:`azure.iot.device.provisioning.pipeline.exceptions.UnauthorizedError`
        :raises: :class:`azure.iot.device.provisioning.pipeline.exceptions.ProtocolClientError`
        """
        logger.debug("connect called")

        def on_complete(op, error):
            callback(error=error)

        self._pipeline.run_op(pipeline_ops_base.ConnectOperation(callback=on_complete))

    def disconnect(self, callback=None):
        """
        Disconnect from the service.

        :param callback: callback which is called when the connection to the service
            has been disconnected.

        The following exceptions are not "raised", but rather returned via the "error"
        parameter when invoking "callback":

        :raises: :class:`azure.iot.device.iothub.pipeline.exceptions.ProtocolClientError`
        """
        logger.debug("disconnect called")

        def on_complete(op, error):
            callback(error=error)

        self._pipeline.run_op(pipeline_ops_base.DisconnectOperation(callback=on_complete))

    def enable_responses(self, callback=None):
        """
        Enable responses from the DPS service by subscribing to the appropriate topics.

        :param callback: callback which is called when responses are enabled.
        """
        logger.debug("enable_responses called")
        # Marked enabled up front; the subscription op is dispatched below.
        self.responses_enabled[provisioning_constants.REGISTER] = True

        def on_complete(op, error):
            callback(error=error)

        self._pipeline.run_op(
            pipeline_ops_base.EnableFeatureOperation(
                feature_name=provisioning_constants.REGISTER, callback=on_complete
            )
        )

    def register(self, payload=None, callback=None):
        """
        Register with the device provisioning service.

        :param payload: Payload that can be sent with the registration request.
        :param callback: callback which is called when the registration is done.

        The following exceptions are not "raised", but rather returned via the "error"
        parameter when invoking "callback":

        :raises: :class:`azure.iot.device.provisioning.pipeline.exceptions.ConnectionFailedError`
        :raises: :class:`azure.iot.device.provisioning.pipeline.exceptions.ConnectionDroppedError`
        :raises: :class:`azure.iot.device.provisioning.pipeline.exceptions.UnauthorizedError`
        :raises: :class:`azure.iot.device.provisioning.pipeline.exceptions.ProtocolClientError`
        """

        def on_complete(op, error):
            if error:
                callback(error=error, result=None)
            else:
                callback(result=op.registration_result)

        self._pipeline.run_op(
            pipeline_ops_provisioning.RegisterOperation(
                request_payload=payload,
                registration_id=self._registration_id,
                callback=on_complete,
            )
        )
+ It is of the format "$dps/registrations/res/#" + """ + return _get_topic_base() + "res/#" + + +def get_register_topic_for_publish(request_id): + """ + :return: The topic string used to send a registration. It is of the format + "$dps/registrations/PUT/iotdps-register/?$rid= + """ + return (_get_topic_base() + "PUT/iotdps-register/?$rid={request_id}").format(request_id=urllib.parse.quote((str(request_id)), safe="")) + + +def get_query_topic_for_publish(request_id, operation_id): + """ + :return: The topic string used to send a query. It is of the format + "$dps/registrations/GET/iotdps-get-operationstatus/?$rid=&operationId= + """ + return (_get_topic_base() + "GET/iotdps-get-operationstatus/?$rid={request_id}&operationId={operation_id}").format(request_id=urllib.parse.quote((str(request_id)), safe=""), + operation_id=urllib.parse.quote((str(operation_id)), safe="")) + + +def _get_topic_for_response(): + """ + return the topic string used to publish telemetry + """ + return _get_topic_base() + "res/" + + +def is_dps_response_topic(topic): + """ + Topics for responses from DPS are of the following format: + $dps/registrations/res//?$=&=...&= + :param topic: The topic string + """ + if _get_topic_for_response() in topic: + return True + return False + + +def extract_properties_from_dps_response_topic(topic): + """ + Topics for responses from DPS are of the following format: + $dps/registrations/res//?$=&=...&= + Extract key=value pairs from the latter part of the topic. + :param topic: The topic string + :return: a dictionary of property keys mapped to property values. 
+ """ + topic_parts = topic.split("$") + properties = topic_parts[2] + if properties: + key_value_pairs = properties.split("&") + key_value_dict = {} + for entry in key_value_pairs: + pair = entry.split("=") + key = urllib.parse.unquote(pair[0]) + value = urllib.parse.unquote(pair[1]) + if key_value_dict.get(key): + raise ValueError("Duplicate keys in DPS response topic") + else: + key_value_dict[key] = value + + return key_value_dict + + +def extract_status_code_from_dps_response_topic(topic): + """ + Topics for responses from DPS are of the following format: + $dps/registrations/res//?$=&=...&= + Extract the status code part from the topic. + :param topic: The topic string + :return: The status code from the DPS response topic, as a string + """ + POS_STATUS_CODE_IN_TOPIC = 3 + topic_parts = topic.split("$") + url_parts = topic_parts[1].split("/") + status_code = url_parts[POS_STATUS_CODE_IN_TOPIC] + return urllib.parse.unquote(status_code) diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/pipeline_ops_provisioning.py b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/pipeline_ops_provisioning.py new file mode 100644 index 0000000..fdf723b --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/pipeline_ops_provisioning.py @@ -0,0 +1,51 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/provisioning/pipeline/pipeline_ops_provisioning.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 2908 bytes +from azure.iot.device.common.pipeline.pipeline_ops_base import PipelineOperation + +class RegisterOperation(PipelineOperation): + __doc__ = "\n A PipelineOperation object which contains arguments used to send a registration request\n to an Device Provisioning Service.\n\n This operation is in the 
class PollStatusOperation(PipelineOperation):
    """A PipelineOperation object which contains arguments used to poll the Device
    Provisioning Service for the status of an in-progress registration.

    This operation is in the group of DPS operations because it is very specific
    to the DPS client.
    """

    def __init__(self, operation_id, request_payload, callback, registration_result=None):
        """
        Initializer for PollStatusOperation objects.

        :param operation_id: The id of the existing operation for which the polling
            was started.
        :param request_payload: The request that we are sending to the service.
        :param Function callback: The function that gets called when this operation is
            complete or has failed. The callback function must accept a
            PipelineOperation object which indicates the specific operation which has
            completed or failed.
        :param registration_result: Filled in by the provisioning stages once a
            result becomes available.
        """
        super(PollStatusOperation, self).__init__(callback=callback)
        self.operation_id = operation_id
        self.request_payload = request_payload
        self.registration_result = registration_result
        # Timer handles are armed/cleared by the provisioning pipeline stages;
        # all start out unarmed.
        for timer_attr in ("retry_after_timer", "polling_timer", "provisioning_timeout_timer"):
            setattr(self, timer_attr, None)
It contains some common functions like decoding response\n and retrieving error, retrieving registration status, retrieving operation id\n and forming a complete result.\n " + + @pipeline_thread.runs_on_pipeline_thread + def _clear_timeout_timer(self, op, error): + """ + Clearing timer for provisioning operations (Register and PollStatus) + when they respond back from service. + """ + if op.provisioning_timeout_timer: + logger.debug("{}({}): Cancelling provisioning timeout timer".format(self.name, op.name)) + op.provisioning_timeout_timer.cancel() + op.provisioning_timeout_timer = None + + @staticmethod + def _decode_response(provisioning_op): + return json.loads(provisioning_op.response_body.decode("utf-8")) + + @staticmethod + def _form_complete_result(operation_id, decoded_response, status): + """ + Create the registration result from the complete decoded json response for details regarding the registration process. + """ + decoded_state = decoded_response.get("registrationState", None) + registration_state = None + if decoded_state is not None: + registration_state = RegistrationState(device_id=(decoded_state.get("deviceId", None)), + assigned_hub=(decoded_state.get("assignedHub", None)), + sub_status=(decoded_state.get("substatus", None)), + created_date_time=(decoded_state.get("createdDateTimeUtc", None)), + last_update_date_time=(decoded_state.get("lastUpdatedDateTimeUtc", None)), + etag=(decoded_state.get("etag", None)), + payload=(decoded_state.get("payload", None))) + registration_result = RegistrationResult(operation_id=operation_id, + status=status, + registration_state=registration_state) + return registration_result + + def _process_service_error_status_code(self, original_provisioning_op, request_response_op): + logger.info("{stage_name}({op_name}): Received error with status code {status_code} for {prov_op_name} request operation".format(stage_name=(self.name), + op_name=(request_response_op.name), + prov_op_name=(request_response_op.request_type), 
+ status_code=(request_response_op.status_code))) + logger.debug("{stage_name}({op_name}): Response body: {body}".format(stage_name=(self.name), + op_name=(request_response_op.name), + body=(request_response_op.response_body))) + original_provisioning_op.complete(error=(exceptions.ServiceError("{prov_op_name} request returned a service error status code {status_code}".format(prov_op_name=(request_response_op.request_type), + status_code=(request_response_op.status_code))))) + + def _process_retry_status_code(self, error, original_provisioning_op, request_response_op): + retry_interval = int(request_response_op.retry_after, 10) if request_response_op.retry_after is not None else constant.DEFAULT_POLLING_INTERVAL + self_weakref = weakref.ref(self) + + @pipeline_thread.invoke_on_pipeline_thread_nowait + def do_retry_after(): + this = self_weakref() + logger.info("{stage_name}({op_name}): retrying".format(stage_name=(this.name), + op_name=(request_response_op.name))) + original_provisioning_op.retry_after_timer.cancel() + original_provisioning_op.retry_after_timer = None + original_provisioning_op.completed = False + this.run_op(original_provisioning_op) + + logger.info("{stage_name}({op_name}): Op needs retry with interval {interval} because of {error}. 
Setting timer.".format(stage_name=(self.name), + op_name=(request_response_op.name), + interval=retry_interval, + error=error)) + logger.debug("{}({}): Creating retry timer".format(self.name, request_response_op.name)) + original_provisioning_op.retry_after_timer = Timer(retry_interval, do_retry_after) + original_provisioning_op.retry_after_timer.start() + + @staticmethod + def _process_failed_and_assigned_registration_status(error, operation_id, decoded_response, registration_status, original_provisioning_op, request_response_op): + complete_registration_result = CommonProvisioningStage._form_complete_result(operation_id=operation_id, + decoded_response=decoded_response, + status=registration_status) + original_provisioning_op.registration_result = complete_registration_result + if registration_status == "failed": + error = exceptions.ServiceError("Query Status operation returned a failed registration status with a status code of {status_code}".format(status_code=(request_response_op.status_code))) + original_provisioning_op.complete(error=error) + + @staticmethod + def _process_unknown_registration_status(registration_status, original_provisioning_op, request_response_op): + error = exceptions.ServiceError("Query Status Operation encountered an invalid registration status {status} with a status code of {status_code}".format(status=registration_status, + status_code=(request_response_op.status_code))) + original_provisioning_op.complete(error=error) + + +class PollingStatusStage(CommonProvisioningStage): + __doc__ = "\n This stage is responsible for sending the query request once initial response\n is received from the registration response.\n Upon the receipt of the response this stage decides whether\n to send another query request or complete the procedure.\n " + + @pipeline_thread.runs_on_pipeline_thread + def _run_op(self, op): + if isinstance(op, pipeline_ops_provisioning.PollStatusOperation): + query_status_op = op + self_weakref = weakref.ref(self) + + 
@pipeline_thread.invoke_on_pipeline_thread_nowait + def query_timeout(): + this = self_weakref() + logger.info("{stage_name}({op_name}): returning timeout error".format(stage_name=(this.name), + op_name=(op.name))) + query_status_op.complete(error=(exceptions.ServiceError("Operation timed out before provisioning service could respond for {op_type} operation".format(op_type=(constant.QUERY))))) + + logger.debug("{}({}): Creating provisioning timeout timer".format(self.name, op.name)) + query_status_op.provisioning_timeout_timer = Timer(constant.DEFAULT_TIMEOUT_INTERVAL, query_timeout) + query_status_op.provisioning_timeout_timer.start() + + def on_query_response(op, error): + self._clear_timeout_timer(query_status_op, error) + logger.debug("{stage_name}({op_name}): Received response with status code {status_code} for PollStatusOperation with operation id {oper_id}".format(stage_name=(self.name), + op_name=(op.name), + status_code=(op.status_code), + oper_id=(op.query_params["operation_id"]))) + if error: + logger.debug("{stage_name}({op_name}): Received error for {prov_op_name} operation".format(stage_name=(self.name), + op_name=(op.name), + prov_op_name=(op.request_type))) + query_status_op.complete(error=error) + else: + if 300 <= op.status_code < 429: + self._process_service_error_status_code(query_status_op, op) + else: + if op.status_code >= 429: + self._process_retry_status_code(error, query_status_op, op) + else: + decoded_response = self._decode_response(op) + operation_id = decoded_response.get("operationId", None) + registration_status = decoded_response.get("status", None) + if registration_status == "assigning": + polling_interval = int(op.retry_after, 10) if op.retry_after is not None else constant.DEFAULT_POLLING_INTERVAL + self_weakref = weakref.ref(self) + + @pipeline_thread.invoke_on_pipeline_thread_nowait + def do_polling(): + this = self_weakref() + logger.info("{stage_name}({op_name}): retrying".format(stage_name=(this.name), + 
op_name=(op.name))) + query_status_op.polling_timer.cancel() + query_status_op.polling_timer = None + query_status_op.completed = False + this.run_op(query_status_op) + + logger.debug("{stage_name}({op_name}): Op needs retry with interval {interval} because of {error}. Setting timer.".format(stage_name=(self.name), + op_name=(op.name), + interval=polling_interval, + error=error)) + logger.debug("{}({}): Creating polling timer".format(self.name, op.name)) + query_status_op.polling_timer = Timer(polling_interval, do_polling) + query_status_op.polling_timer.start() + else: + if registration_status == "assigned" or registration_status == "failed": + self._process_failed_and_assigned_registration_status(error=error, + operation_id=operation_id, + decoded_response=decoded_response, + registration_status=registration_status, + original_provisioning_op=query_status_op, + request_response_op=op) + else: + self._process_unknown_registration_status(registration_status=registration_status, + original_provisioning_op=query_status_op, + request_response_op=op) + + self.send_op_down(pipeline_ops_base.RequestAndResponseOperation(request_type=(constant.QUERY), + method="GET", + resource_location="/", + query_params={"operation_id": (query_status_op.operation_id)}, + request_body=(query_status_op.request_payload), + callback=on_query_response)) + else: + super(PollingStatusStage, self)._run_op(op) + + +class RegistrationStage(CommonProvisioningStage): + __doc__ = "\n This is the first stage that decides converts a registration request\n into a normal request and response operation.\n Upon the receipt of the response this stage decides whether\n to send another registration request or send a query request.\n Depending on the status and result of the response\n this stage may also complete the registration process.\n " + + @pipeline_thread.runs_on_pipeline_thread + def _run_op(self, op): + if isinstance(op, pipeline_ops_provisioning.RegisterOperation): + initial_register_op = op + 
self_weakref = weakref.ref(self) + + @pipeline_thread.invoke_on_pipeline_thread_nowait + def register_timeout(): + this = self_weakref() + logger.info("{stage_name}({op_name}): returning timeout error".format(stage_name=(this.name), + op_name=(op.name))) + initial_register_op.complete(error=(exceptions.ServiceError("Operation timed out before provisioning service could respond for {op_type} operation".format(op_type=(constant.REGISTER))))) + + logger.debug("{}({}): Creating provisioning timeout timer".format(self.name, op.name)) + initial_register_op.provisioning_timeout_timer = Timer(constant.DEFAULT_TIMEOUT_INTERVAL, register_timeout) + initial_register_op.provisioning_timeout_timer.start() + + def on_registration_response(op, error): + self._clear_timeout_timer(initial_register_op, error) + logger.debug("{stage_name}({op_name}): Received response with status code {status_code} for RegisterOperation".format(stage_name=(self.name), + op_name=(op.name), + status_code=(op.status_code))) + if error: + logger.info("{stage_name}({op_name}): Received error for {prov_op_name} operation".format(stage_name=(self.name), + op_name=(op.name), + prov_op_name=(op.request_type))) + initial_register_op.complete(error=error) + else: + if 300 <= op.status_code < 429: + self._process_service_error_status_code(initial_register_op, op) + else: + if op.status_code >= 429: + self._process_retry_status_code(error, initial_register_op, op) + else: + decoded_response = self._decode_response(op) + operation_id = decoded_response.get("operationId", None) + registration_status = decoded_response.get("status", None) + if registration_status == "assigning": + self_weakref = weakref.ref(self) + + def copy_result_to_original_op(op, error): + logger.debug("Copying registration result from Query Status Op to Registration Op") + initial_register_op.registration_result = op.registration_result + initial_register_op.error = error + + @pipeline_thread.invoke_on_pipeline_thread_nowait + def 
do_query_after_interval(): + this = self_weakref() + initial_register_op.polling_timer.cancel() + initial_register_op.polling_timer = None + logger.info("{stage_name}({op_name}): polling".format(stage_name=(this.name), + op_name=(op.name))) + query_worker_op = initial_register_op.spawn_worker_op(worker_op_type=(pipeline_ops_provisioning.PollStatusOperation), + request_payload=" ", + operation_id=operation_id, + callback=copy_result_to_original_op) + self.send_op_down(query_worker_op) + + logger.debug("{stage_name}({op_name}): Op will transition into polling after interval {interval}. Setting timer.".format(stage_name=(self.name), + op_name=(op.name), + interval=(constant.DEFAULT_POLLING_INTERVAL))) + logger.debug("{}({}): Creating polling timer".format(self.name, op.name)) + initial_register_op.polling_timer = Timer(constant.DEFAULT_POLLING_INTERVAL, do_query_after_interval) + initial_register_op.polling_timer.start() + else: + if registration_status == "failed" or registration_status == "assigned": + self._process_failed_and_assigned_registration_status(error=error, + operation_id=operation_id, + decoded_response=decoded_response, + registration_status=registration_status, + original_provisioning_op=initial_register_op, + request_response_op=op) + else: + self._process_unknown_registration_status(registration_status=registration_status, + original_provisioning_op=initial_register_op, + request_response_op=op) + + registration_payload = DeviceRegistrationPayload(registration_id=(initial_register_op.registration_id), + custom_payload=(initial_register_op.request_payload)) + self.send_op_down(pipeline_ops_base.RequestAndResponseOperation(request_type=(constant.REGISTER), + method="PUT", + resource_location="/", + request_body=(registration_payload.get_json_string()), + callback=on_registration_response)) + else: + super(RegistrationStage, self)._run_op(op) + + +class DeviceRegistrationPayload(object): + __doc__ = "\n The class representing the payload that needs to 
be sent to the service.\n " + + def __init__(self, registration_id, custom_payload=None): + self.registrationId = registration_id + self.payload = custom_payload + + def get_json_string(self): + return json.dumps(self, default=(lambda o: o.__dict__), sort_keys=True) diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/pipeline_stages_provisioning_mqtt.py b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/pipeline_stages_provisioning_mqtt.py new file mode 100644 index 0000000..1774c73 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/pipeline/pipeline_stages_provisioning_mqtt.py @@ -0,0 +1,103 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/provisioning/pipeline/pipeline_stages_provisioning_mqtt.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 6920 bytes +import logging +import six.moves.urllib as urllib +from azure.iot.device.common import version_compat +from azure.iot.device.common.pipeline import pipeline_ops_base, pipeline_ops_mqtt, pipeline_events_mqtt, pipeline_thread, pipeline_events_base, pipeline_exceptions +from azure.iot.device.common.pipeline.pipeline_stages_base import PipelineStage +from azure.iot.device.provisioning.pipeline import mqtt_topic_provisioning +from azure.iot.device.provisioning.pipeline import pipeline_ops_provisioning +from azure.iot.device import constant as pkg_constant +from . import constant as pipeline_constant +from azure.iot.device import user_agent +logger = logging.getLogger(__name__) + +class ProvisioningMQTTTranslationStage(PipelineStage): + __doc__ = "\n PipelineStage which converts other Provisioning pipeline operations into MQTT operations. 
This stage also\n converts MQTT pipeline events into Provisioning pipeline events.\n " + + def __init__(self): + super(ProvisioningMQTTTranslationStage, self).__init__() + self.action_to_topic = {} + + @pipeline_thread.runs_on_pipeline_thread + def _run_op(self, op): + if isinstance(op, pipeline_ops_base.InitializePipelineOperation): + client_id = self.pipeline_root.pipeline_configuration.registration_id + query_param_seq = [ + ( + "api-version", pkg_constant.PROVISIONING_API_VERSION), + ( + "ClientVersion", user_agent.get_provisioning_user_agent())] + username = "{id_scope}/registrations/{registration_id}/{query_params}".format(id_scope=(self.pipeline_root.pipeline_configuration.id_scope), + registration_id=(self.pipeline_root.pipeline_configuration.registration_id), + query_params=version_compat.urlencode(query_param_seq, + quote_via=(urllib.parse.quote))) + op.username = username + op.client_id = client_id + self.send_op_down(op) + else: + if isinstance(op, pipeline_ops_base.RequestOperation): + if op.request_type == pipeline_constant.REGISTER: + topic = mqtt_topic_provisioning.get_register_topic_for_publish(request_id=(op.request_id)) + worker_op = op.spawn_worker_op(worker_op_type=(pipeline_ops_mqtt.MQTTPublishOperation), + topic=topic, + payload=(op.request_body)) + self.send_op_down(worker_op) + else: + if op.request_type == pipeline_constant.QUERY: + topic = mqtt_topic_provisioning.get_query_topic_for_publish(request_id=(op.request_id), + operation_id=(op.query_params["operation_id"])) + worker_op = op.spawn_worker_op(worker_op_type=(pipeline_ops_mqtt.MQTTPublishOperation), + topic=topic, + payload=(op.request_body)) + self.send_op_down(worker_op) + else: + raise pipeline_exceptions.OperationError("RequestOperation request_type {} not supported".format(op.request_type)) + else: + if isinstance(op, pipeline_ops_base.EnableFeatureOperation): + if not op.feature_name == pipeline_constant.REGISTER: + raise pipeline_exceptions.OperationError("Trying to 
enable/disable invalid feature - {}".format(op.feature_name)) + topic = mqtt_topic_provisioning.get_register_topic_for_subscribe() + worker_op = op.spawn_worker_op(worker_op_type=(pipeline_ops_mqtt.MQTTSubscribeOperation), + topic=topic) + self.send_op_down(worker_op) + else: + if isinstance(op, pipeline_ops_base.DisableFeatureOperation): + if not op.feature_name == pipeline_constant.REGISTER: + raise pipeline_exceptions.OperationError("Trying to enable/disable invalid feature - {}".format(op.feature_name)) + topic = mqtt_topic_provisioning.get_register_topic_for_subscribe() + worker_op = op.spawn_worker_op(worker_op_type=(pipeline_ops_mqtt.MQTTUnsubscribeOperation), + topic=topic) + self.send_op_down(worker_op) + else: + super(ProvisioningMQTTTranslationStage, self)._run_op(op) + + @pipeline_thread.runs_on_pipeline_thread + def _handle_pipeline_event(self, event): + """ + Pipeline Event handler function to convert incoming MQTT messages into the appropriate DPS + events, based on the topic of the message + """ + if isinstance(event, pipeline_events_mqtt.IncomingMQTTMessageEvent): + topic = event.topic + if mqtt_topic_provisioning.is_dps_response_topic(topic): + logger.debug("Received payload:{payload} on topic:{topic}".format(payload=(event.payload), + topic=topic)) + key_values = mqtt_topic_provisioning.extract_properties_from_dps_response_topic(topic) + retry_after = key_values.get("retry-after", None) + status_code = mqtt_topic_provisioning.extract_status_code_from_dps_response_topic(topic) + request_id = key_values["rid"] + self.send_event_up(pipeline_events_base.ResponseEvent(request_id=request_id, + status_code=(int(status_code, 10)), + response_body=(event.payload), + retry_after=retry_after)) + else: + logger.debug("Unknown topic: {} passing up to next handler".format(topic)) + self.send_event_up(event) + else: + self.send_event_up(event) diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/provisioning_device_client.py 
b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/provisioning_device_client.py new file mode 100644 index 0000000..a2f477a --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/provisioning/provisioning_device_client.py @@ -0,0 +1,108 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/provisioning/provisioning_device_client.py +# Compiled at: 2024-04-18 03:12:59 +# Size of source mod 2**32: 4354 bytes +""" +This module contains user-facing synchronous Provisioning Device Client for Azure Provisioning +Device SDK. This client uses Symmetric Key and X509 authentication to register devices with an +IoT Hub via the Device Provisioning Service. +""" +import logging +from azure.iot.device.common.evented_callback import EventedCallback +from .abstract_provisioning_device_client import AbstractProvisioningDeviceClient +from .abstract_provisioning_device_client import log_on_register_complete +from azure.iot.device.provisioning.pipeline import constant as dps_constant +from .pipeline import exceptions as pipeline_exceptions +from azure.iot.device import exceptions +logger = logging.getLogger(__name__) + +def handle_result(callback): + try: + return callback.wait_for_completion() + except pipeline_exceptions.ConnectionDroppedError as e: + try: + raise exceptions.ConnectionDroppedError(message="Lost connection to the provisioning server", + cause=e) + finally: + e = None + del e + + except pipeline_exceptions.ConnectionFailedError as e: + try: + raise exceptions.ConnectionFailedError(message="Could not connect to the provisioning server", + cause=e) + finally: + e = None + del e + + except pipeline_exceptions.UnauthorizedError as e: + try: + raise exceptions.CredentialError(message="Credentials invalid, could not connect", cause=e) + finally: + e = None + del e + + 
except pipeline_exceptions.ProtocolClientError as e: + try: + raise exceptions.ClientError(message="Error in the provisioning client", cause=e) + finally: + e = None + del e + + except Exception as e: + try: + raise exceptions.ClientError(message="Unexpected failure", cause=e) + finally: + e = None + del e + + +class ProvisioningDeviceClient(AbstractProvisioningDeviceClient): + __doc__ = "\n Client which can be used to run the registration of a device with provisioning service\n using Symmetric Key or X509 authentication.\n " + + def register(self): + """ + Register the device with the provisioning service + + This is a synchronous call, meaning that this function will not return until the + registration process has completed successfully or the attempt has resulted in a failure. + Before returning, the client will also disconnect from the provisioning service. + If a registration attempt is made while a previous registration is in progress it may + throw an error. + + :returns: RegistrationResult indicating the result of the registration. + :rtype: :class:`azure.iot.device.RegistrationResult` + + :raises: :class:`azure.iot.device.exceptions.CredentialError` if credentials are invalid + and a connection cannot be established. + :raises: :class:`azure.iot.device.exceptions.ConnectionFailedError` if establishing a + connection results in failure. + :raises: :class:`azure.iot.device.exceptions.ConnectionDroppedError` if connection is lost + during execution. + :raises: :class:`azure.iot.device.exceptions.ClientError` if there is an unexpected failure + during execution. 
+ """ + logger.info("Registering with Provisioning Service...") + if not self._pipeline.responses_enabled[dps_constant.REGISTER]: + self._enable_responses() + register_complete = EventedCallback(return_arg_name="result") + self._pipeline.register(payload=(self._provisioning_payload), callback=register_complete) + result = handle_result(register_complete) + log_on_register_complete(result) + return result + + def _enable_responses(self): + """Enable to receive responses from Device Provisioning Service. + + This is a synchronous call, meaning that this function will not return until the feature + has been enabled. + + """ + logger.info("Enabling reception of response from Device Provisioning Service...") + subscription_complete = EventedCallback() + self._pipeline.enable_responses(callback=subscription_complete) + handle_result(subscription_complete) + logger.info("Successfully subscribed to Device Provisioning Service to receive responses") diff --git a/APPS_UNCOMPILED/lib/azure/iot/device/user_agent.py b/APPS_UNCOMPILED/lib/azure/iot/device/user_agent.py new file mode 100644 index 0000000..e644366 --- /dev/null +++ b/APPS_UNCOMPILED/lib/azure/iot/device/user_agent.py @@ -0,0 +1,38 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/azure/iot/device/user_agent.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 1357 bytes +"""This module is for creating agent strings for all clients""" +import platform +from azure.iot.device.constant import VERSION, IOTHUB_IDENTIFIER, PROVISIONING_IDENTIFIER +python_runtime = platform.python_version() +os_type = platform.system() +os_release = platform.version() +architecture = platform.machine() + +def _get_common_user_agent(): + return "({python_runtime};{os_type} 
{os_release};{architecture})".format(python_runtime=python_runtime, + os_type=os_type, + os_release=os_release, + architecture=architecture) + + +def get_iothub_user_agent(): + """ + Create the user agent for IotHub + """ + return "{iothub_iden}/{version}{common}".format(iothub_iden=IOTHUB_IDENTIFIER, + version=VERSION, + common=(_get_common_user_agent())) + + +def get_provisioning_user_agent(): + """ + Create the user agent for Provisioning + """ + return "{provisioning_iden}/{version}{common}".format(provisioning_iden=PROVISIONING_IDENTIFIER, + version=VERSION, + common=(_get_common_user_agent())) diff --git a/APPS_UNCOMPILED/lib/chardet/__init__.py b/APPS_UNCOMPILED/lib/chardet/__init__.py new file mode 100644 index 0000000..588b4fe --- /dev/null +++ b/APPS_UNCOMPILED/lib/chardet/__init__.py @@ -0,0 +1,62 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/__init__.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 3271 bytes +from .universaldetector import UniversalDetector +from .enums import InputState +from .version import __version__, VERSION +__all__ = [ + 'UniversalDetector', 'detect', 'detect_all', '__version__', 'VERSION'] + +def detect(byte_str): + """ + Detect the encoding of the given byte string. + + :param byte_str: The byte sequence to examine. + :type byte_str: ``bytes`` or ``bytearray`` + """ + if not isinstance(byte_str, bytearray): + if not isinstance(byte_str, bytes): + raise TypeError("Expected object of type bytes or bytearray, got: {}".format(type(byte_str))) + else: + byte_str = bytearray(byte_str) + detector = UniversalDetector() + detector.feed(byte_str) + return detector.close() + + +def detect_all(byte_str): + """ + Detect all the possible encodings of the given byte string. 
+ + :param byte_str: The byte sequence to examine. + :type byte_str: ``bytes`` or ``bytearray`` + """ + if not isinstance(byte_str, bytearray): + if not isinstance(byte_str, bytes): + raise TypeError("Expected object of type bytes or bytearray, got: {}".format(type(byte_str))) + else: + byte_str = bytearray(byte_str) + detector = UniversalDetector() + detector.feed(byte_str) + detector.close() + if detector._input_state == InputState.HIGH_BYTE: + results = [] + for prober in detector._charset_probers: + if prober.get_confidence() > detector.MINIMUM_THRESHOLD: + charset_name = prober.charset_name + lower_charset_name = prober.charset_name.lower() + if lower_charset_name.startswith("iso-8859"): + if detector._has_win_bytes: + charset_name = detector.ISO_WIN_MAP.get(lower_charset_name, charset_name) + results.append({'encoding':charset_name, + 'confidence':(prober.get_confidence)(), + 'language':prober.language}) + + if len(results) > 0: + return sorted(results, key=(lambda result: -result["confidence"])) + return [ + detector.result] diff --git a/APPS_UNCOMPILED/lib/chardet/big5freq.py b/APPS_UNCOMPILED/lib/chardet/big5freq.py new file mode 100644 index 0000000..4c1ee6a --- /dev/null +++ b/APPS_UNCOMPILED/lib/chardet/big5freq.py @@ -0,0 +1,558 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/big5freq.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 31254 bytes +BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 +BIG5_TABLE_SIZE = 5376 +BIG5_CHAR_TO_FREQ_ORDER = (1, 1801, 1506, 255, 1431, 198, 9, 82, 6, 5008, 177, 202, + 3681, 1256, 2821, 110, 3814, 33, 3274, 261, 76, 44, 2114, + 16, 2946, 2187, 1176, 659, 3971, 26, 3451, 2653, 1198, + 3972, 3350, 4202, 410, 2215, 302, 590, 361, 1964, 8, 204, + 58, 4510, 5009, 1932, 63, 5010, 5011, 317, 1614, 75, 222, + 159, 4203, 
2417, 1480, 5012, 3555, 3091, 224, 2822, 3682, + 3, 10, 3973, 1471, 29, 2787, 1135, 2866, 1940, 873, 130, + 3275, 1123, 312, 5013, 4511, 2052, 507, 252, 682, 5014, + 142, 1915, 124, 206, 2947, 34, 3556, 3204, 64, 604, 5015, + 2501, 1977, 1978, 155, 1991, 645, 641, 1606, 5016, 3452, + 337, 72, 406, 5017, 80, 630, 238, 3205, 1509, 263, 939, + 1092, 2654, 756, 1440, 1094, 3453, 449, 69, 2987, 591, + 179, 2096, 471, 115, 2035, 1844, 60, 50, 2988, 134, 806, + 1869, 734, 2036, 3454, 180, 995, 1607, 156, 537, 2907, + 688, 5018, 319, 1305, 779, 2145, 514, 2379, 298, 4512, + 359, 2502, 90, 2716, 1338, 663, 11, 906, 1099, 2553, 20, + 2441, 182, 532, 1716, 5019, 732, 1376, 4204, 1311, 1420, + 3206, 25, 2317, 1056, 113, 399, 382, 1950, 242, 3455, + 2474, 529, 3276, 475, 1447, 3683, 5020, 117, 21, 656, + 810, 1297, 2300, 2334, 3557, 5021, 126, 4205, 706, 456, + 150, 613, 4513, 71, 1118, 2037, 4206, 145, 3092, 85, 835, + 486, 2115, 1246, 1426, 428, 727, 1285, 1015, 800, 106, + 623, 303, 1281, 5022, 2128, 2359, 347, 3815, 221, 3558, + 3135, 5023, 1956, 1153, 4207, 83, 296, 1199, 3093, 192, + 624, 93, 5024, 822, 1898, 2823, 3136, 795, 2065, 991, + 1554, 1542, 1592, 27, 43, 2867, 859, 139, 1456, 860, 4514, + 437, 712, 3974, 164, 2397, 3137, 695, 211, 3037, 2097, + 195, 3975, 1608, 3559, 3560, 3684, 3976, 234, 811, 2989, + 2098, 3977, 2233, 1441, 3561, 1615, 2380, 668, 2077, 1638, + 305, 228, 1664, 4515, 467, 415, 5025, 262, 2099, 1593, + 239, 108, 300, 200, 1033, 512, 1247, 2078, 5026, 5027, + 2176, 3207, 3685, 2682, 593, 845, 1062, 3277, 88, 1723, + 2038, 3978, 1951, 212, 266, 152, 149, 468, 1899, 4208, + 4516, 77, 187, 5028, 3038, 37, 5, 2990, 5029, 3979, 5030, + 5031, 39, 2524, 4517, 2908, 3208, 2079, 55, 148, 74, 4518, + 545, 483, 1474, 1029, 1665, 217, 1870, 1531, 3138, 1104, + 2655, 4209, 24, 172, 3562, 900, 3980, 3563, 3564, 4519, + 32, 1408, 2824, 1312, 329, 487, 2360, 2251, 2717, 784, + 2683, 4, 3039, 3351, 1427, 1789, 188, 109, 499, 5032, + 3686, 1717, 1790, 888, 1217, 
3040, 4520, 5033, 3565, 5034, + 3352, 1520, 3687, 3981, 196, 1034, 775, 5035, 5036, 929, + 1816, 249, 439, 38, 5037, 1063, 5038, 794, 3982, 1435, + 2301, 46, 178, 3278, 2066, 5039, 2381, 5040, 214, 1709, + 4521, 804, 35, 707, 324, 3688, 1601, 2554, 140, 459, 4210, + 5041, 5042, 1365, 839, 272, 978, 2262, 2580, 3456, 2129, + 1363, 3689, 1423, 697, 100, 3094, 48, 70, 1231, 495, 3139, + 2196, 5043, 1294, 5044, 2080, 462, 586, 1042, 3279, 853, + 256, 988, 185, 2382, 3457, 1698, 434, 1084, 5045, 3458, + 314, 2625, 2788, 4522, 2335, 2336, 569, 2285, 637, 1817, + 2525, 757, 1162, 1879, 1616, 3459, 287, 1577, 2116, 768, + 4523, 1671, 2868, 3566, 2526, 1321, 3816, 909, 2418, 5046, + 4211, 933, 3817, 4212, 2053, 2361, 1222, 4524, 765, 2419, + 1322, 786, 4525, 5047, 1920, 1462, 1677, 2909, 1699, 5048, + 4526, 1424, 2442, 3140, 3690, 2600, 3353, 1775, 1941, + 3460, 3983, 4213, 309, 1369, 1130, 2825, 364, 2234, 1653, + 1299, 3984, 3567, 3985, 3986, 2656, 525, 1085, 3041, 902, + 2001, 1475, 964, 4527, 421, 1845, 1415, 1057, 2286, 940, + 1364, 3141, 376, 4528, 4529, 1381, 7, 2527, 983, 2383, + 336, 1710, 2684, 1846, 321, 3461, 559, 1131, 3042, 2752, + 1809, 1132, 1313, 265, 1481, 1858, 5049, 352, 1203, 2826, + 3280, 167, 1089, 420, 2827, 776, 792, 1724, 3568, 4214, + 2443, 3281, 5050, 4215, 5051, 446, 229, 333, 2753, 901, + 3818, 1200, 1557, 4530, 2657, 1921, 395, 2754, 2685, 3819, + 4216, 1836, 125, 916, 3209, 2626, 4531, 5052, 5053, 3820, + 5054, 5055, 5056, 4532, 3142, 3691, 1133, 2555, 1757, + 3462, 1510, 2318, 1409, 3569, 5057, 2146, 438, 2601, 2910, + 2384, 3354, 1068, 958, 3043, 461, 311, 2869, 2686, 4217, + 1916, 3210, 4218, 1979, 383, 750, 2755, 2627, 4219, 274, + 539, 385, 1278, 1442, 5058, 1154, 1965, 384, 561, 210, + 98, 1295, 2556, 3570, 5059, 1711, 2420, 1482, 3463, 3987, + 2911, 1257, 129, 5060, 3821, 642, 523, 2789, 2790, 2658, + 5061, 141, 2235, 1333, 68, 176, 441, 876, 907, 4220, 603, + 2602, 710, 171, 3464, 404, 549, 18, 3143, 2398, 1410, + 3692, 1666, 5062, 
3571, 4533, 2912, 4534, 5063, 2991, + 368, 5064, 146, 366, 99, 871, 3693, 1543, 748, 807, 1586, + 1185, 22, 2263, 379, 3822, 3211, 5065, 3212, 505, 1942, + 2628, 1992, 1382, 2319, 5066, 380, 2362, 218, 702, 1818, + 1248, 3465, 3044, 3572, 3355, 3282, 5067, 2992, 3694, + 930, 3283, 3823, 5068, 59, 5069, 585, 601, 4221, 497, + 3466, 1112, 1314, 4535, 1802, 5070, 1223, 1472, 2177, + 5071, 749, 1837, 690, 1900, 3824, 1773, 3988, 1476, 429, + 1043, 1791, 2236, 2117, 917, 4222, 447, 1086, 1629, 5072, + 556, 5073, 5074, 2021, 1654, 844, 1090, 105, 550, 966, + 1758, 2828, 1008, 1783, 686, 1095, 5075, 2287, 793, 1602, + 5076, 3573, 2603, 4536, 4223, 2948, 2302, 4537, 3825, + 980, 2503, 544, 353, 527, 4538, 908, 2687, 2913, 5077, + 381, 2629, 1943, 1348, 5078, 1341, 1252, 560, 3095, 5079, + 3467, 2870, 5080, 2054, 973, 886, 2081, 143, 4539, 5081, + 5082, 157, 3989, 496, 4224, 57, 840, 540, 2039, 4540, + 4541, 3468, 2118, 1445, 970, 2264, 1748, 1966, 2082, 4225, + 3144, 1234, 1776, 3284, 2829, 3695, 773, 1206, 2130, 1066, + 2040, 1326, 3990, 1738, 1725, 4226, 279, 3145, 51, 1544, + 2604, 423, 1578, 2131, 2067, 173, 4542, 1880, 5083, 5084, + 1583, 264, 610, 3696, 4543, 2444, 280, 154, 5085, 5086, + 5087, 1739, 338, 1282, 3096, 693, 2871, 1411, 1074, 3826, + 2445, 5088, 4544, 5089, 5090, 1240, 952, 2399, 5091, 2914, + 1538, 2688, 685, 1483, 4227, 2475, 1436, 953, 4228, 2055, + 4545, 671, 2400, 79, 4229, 2446, 3285, 608, 567, 2689, + 3469, 4230, 4231, 1691, 393, 1261, 1792, 2401, 5092, 4546, + 5093, 5094, 5095, 5096, 1383, 1672, 3827, 3213, 1464, + 522, 1119, 661, 1150, 216, 675, 4547, 3991, 1432, 3574, + 609, 4548, 2690, 2402, 5097, 5098, 5099, 4232, 3045, 0, + 5100, 2476, 315, 231, 2447, 301, 3356, 4549, 2385, 5101, + 233, 4233, 3697, 1819, 4550, 4551, 5102, 96, 1777, 1315, + 2083, 5103, 257, 5104, 1810, 3698, 2718, 1139, 1820, 4234, + 2022, 1124, 2164, 2791, 1778, 2659, 5105, 3097, 363, 1655, + 3214, 5106, 2993, 5107, 5108, 5109, 3992, 1567, 3993, + 718, 103, 3215, 849, 1443, 
341, 3357, 2949, 1484, 5110, + 1712, 127, 67, 339, 4235, 2403, 679, 1412, 821, 5111, + 5112, 834, 738, 351, 2994, 2147, 846, 235, 1497, 1881, + 418, 1993, 3828, 2719, 186, 1100, 2148, 2756, 3575, 1545, + 1355, 2950, 2872, 1377, 583, 3994, 4236, 2581, 2995, 5113, + 1298, 3699, 1078, 2557, 3700, 2363, 78, 3829, 3830, 267, + 1289, 2100, 2002, 1594, 4237, 348, 369, 1274, 2197, 2178, + 1838, 4552, 1821, 2830, 3701, 2757, 2288, 2003, 4553, + 2951, 2758, 144, 3358, 882, 4554, 3995, 2759, 3470, 4555, + 2915, 5114, 4238, 1726, 320, 5115, 3996, 3046, 788, 2996, + 5116, 2831, 1774, 1327, 2873, 3997, 2832, 5117, 1306, + 4556, 2004, 1700, 3831, 3576, 2364, 2660, 787, 2023, 506, + 824, 3702, 534, 323, 4557, 1044, 3359, 2024, 1901, 946, + 3471, 5118, 1779, 1500, 1678, 5119, 1882, 4558, 165, 243, + 4559, 3703, 2528, 123, 683, 4239, 764, 4560, 36, 3998, + 1793, 589, 2916, 816, 626, 1667, 3047, 2237, 1639, 1555, + 1622, 3832, 3999, 5120, 4000, 2874, 1370, 1228, 1933, + 891, 2084, 2917, 304, 4240, 5121, 292, 2997, 2720, 3577, + 691, 2101, 4241, 1115, 4561, 118, 662, 5122, 611, 1156, + 854, 2386, 1316, 2875, 2, 386, 515, 2918, 5123, 5124, + 3286, 868, 2238, 1486, 855, 2661, 785, 2216, 3048, 5125, + 1040, 3216, 3578, 5126, 3146, 448, 5127, 1525, 5128, 2165, + 4562, 5129, 3833, 5130, 4242, 2833, 3579, 3147, 503, 818, + 4001, 3148, 1568, 814, 676, 1444, 306, 1749, 5131, 3834, + 1416, 1030, 197, 1428, 805, 2834, 1501, 4563, 5132, 5133, + 5134, 1994, 5135, 4564, 5136, 5137, 2198, 13, 2792, 3704, + 2998, 3149, 1229, 1917, 5138, 3835, 2132, 5139, 4243, + 4565, 2404, 3580, 5140, 2217, 1511, 1727, 1120, 5141, + 5142, 646, 3836, 2448, 307, 5143, 5144, 1595, 3217, 5145, + 5146, 5147, 3705, 1113, 1356, 4002, 1465, 2529, 2530, + 5148, 519, 5149, 128, 2133, 92, 2289, 1980, 5150, 4003, + 1512, 342, 3150, 2199, 5151, 2793, 2218, 1981, 3360, 4244, + 290, 1656, 1317, 789, 827, 2365, 5152, 3837, 4566, 562, + 581, 4004, 5153, 401, 4567, 2252, 94, 4568, 5154, 1399, + 2794, 5155, 1463, 2025, 4569, 3218, 
1944, 5156, 828, 1105, + 4245, 1262, 1394, 5157, 4246, 605, 4570, 5158, 1784, 2876, + 5159, 2835, 819, 2102, 578, 2200, 2952, 5160, 1502, 436, + 3287, 4247, 3288, 2836, 4005, 2919, 3472, 3473, 5161, + 2721, 2320, 5162, 5163, 2337, 2068, 23, 4571, 193, 826, + 3838, 2103, 699, 1630, 4248, 3098, 390, 1794, 1064, 3581, + 5164, 1579, 3099, 3100, 1400, 5165, 4249, 1839, 1640, + 2877, 5166, 4572, 4573, 137, 4250, 598, 3101, 1967, 780, + 104, 974, 2953, 5167, 278, 899, 253, 402, 572, 504, 493, + 1339, 5168, 4006, 1275, 4574, 2582, 2558, 5169, 3706, + 3049, 3102, 2253, 565, 1334, 2722, 863, 41, 5170, 5171, + 4575, 5172, 1657, 2338, 19, 463, 2760, 4251, 606, 5173, + 2999, 3289, 1087, 2085, 1323, 2662, 3000, 5174, 1631, + 1623, 1750, 4252, 2691, 5175, 2878, 791, 2723, 2663, 2339, + 232, 2421, 5176, 3001, 1498, 5177, 2664, 2630, 755, 1366, + 3707, 3290, 3151, 2026, 1609, 119, 1918, 3474, 862, 1026, + 4253, 5178, 4007, 3839, 4576, 4008, 4577, 2265, 1952, + 2477, 5179, 1125, 817, 4254, 4255, 4009, 1513, 1766, 2041, + 1487, 4256, 3050, 3291, 2837, 3840, 3152, 5180, 5181, + 1507, 5182, 2692, 733, 40, 1632, 1106, 2879, 345, 4257, + 841, 2531, 230, 4578, 3002, 1847, 3292, 3475, 5183, 1263, + 986, 3476, 5184, 735, 879, 254, 1137, 857, 622, 1300, + 1180, 1388, 1562, 4010, 4011, 2954, 967, 2761, 2665, 1349, + 592, 2134, 1692, 3361, 3003, 1995, 4258, 1679, 4012, 1902, + 2188, 5185, 739, 3708, 2724, 1296, 1290, 5186, 4259, 2201, + 2202, 1922, 1563, 2605, 2559, 1871, 2762, 3004, 5187, + 435, 5188, 343, 1108, 596, 17, 1751, 4579, 2239, 3477, + 3709, 5189, 4580, 294, 3582, 2955, 1693, 477, 979, 281, + 2042, 3583, 643, 2043, 3710, 2631, 2795, 2266, 1031, 2340, + 2135, 2303, 3584, 4581, 367, 1249, 2560, 5190, 3585, 5191, + 4582, 1283, 3362, 2005, 240, 1762, 3363, 4583, 4584, 836, + 1069, 3153, 474, 5192, 2149, 2532, 268, 3586, 5193, 3219, + 1521, 1284, 5194, 1658, 1546, 4260, 5195, 3587, 3588, + 5196, 4261, 3364, 2693, 1685, 4262, 961, 1673, 2632, 190, + 2006, 2203, 3841, 4585, 4586, 5197, 
570, 2504, 3711, 1490, + 5198, 4587, 2633, 3293, 1957, 4588, 584, 1514, 396, 1045, + 1945, 5199, 4589, 1968, 2449, 5200, 5201, 4590, 4013, + 619, 5202, 3154, 3294, 215, 2007, 2796, 2561, 3220, 4591, + 3221, 4592, 763, 4263, 3842, 4593, 5203, 5204, 1958, 1767, + 2956, 3365, 3712, 1174, 452, 1477, 4594, 3366, 3155, 5205, + 2838, 1253, 2387, 2189, 1091, 2290, 4264, 492, 5206, 638, + 1169, 1825, 2136, 1752, 4014, 648, 926, 1021, 1324, 4595, + 520, 4596, 997, 847, 1007, 892, 4597, 3843, 2267, 1872, + 3713, 2405, 1785, 4598, 1953, 2957, 3103, 3222, 1728, + 4265, 2044, 3714, 4599, 2008, 1701, 3156, 1551, 30, 2268, + 4266, 5207, 2027, 4600, 3589, 5208, 501, 5209, 4267, 594, + 3478, 2166, 1822, 3590, 3479, 3591, 3223, 829, 2839, 4268, + 5210, 1680, 3157, 1225, 4269, 5211, 3295, 4601, 4270, + 3158, 2341, 5212, 4602, 4271, 5213, 4015, 4016, 5214, + 1848, 2388, 2606, 3367, 5215, 4603, 374, 4017, 652, 4272, + 4273, 375, 1140, 798, 5216, 5217, 5218, 2366, 4604, 2269, + 546, 1659, 138, 3051, 2450, 4605, 5219, 2254, 612, 1849, + 910, 796, 3844, 1740, 1371, 825, 3845, 3846, 5220, 2920, + 2562, 5221, 692, 444, 3052, 2634, 801, 4606, 4274, 5222, + 1491, 244, 1053, 3053, 4275, 4276, 340, 5223, 4018, 1041, + 3005, 293, 1168, 87, 1357, 5224, 1539, 959, 5225, 2240, + 721, 694, 4277, 3847, 219, 1478, 644, 1417, 3368, 2666, + 1413, 1401, 1335, 1389, 4019, 5226, 5227, 3006, 2367, + 3159, 1826, 730, 1515, 184, 2840, 66, 4607, 5228, 1660, + 2958, 246, 3369, 378, 1457, 226, 3480, 975, 4020, 2959, + 1264, 3592, 674, 696, 5229, 163, 5230, 1141, 2422, 2167, + 713, 3593, 3370, 4608, 4021, 5231, 5232, 1186, 15, 5233, + 1079, 1070, 5234, 1522, 3224, 3594, 276, 1050, 2725, 758, + 1126, 653, 2960, 3296, 5235, 2342, 889, 3595, 4022, 3104, + 3007, 903, 1250, 4609, 4023, 3481, 3596, 1342, 1681, 1718, + 766, 3297, 286, 89, 2961, 3715, 5236, 1713, 5237, 2607, + 3371, 3008, 5238, 2962, 2219, 3225, 2880, 5239, 4610, + 2505, 2533, 181, 387, 1075, 4024, 731, 2190, 3372, 5240, + 3298, 310, 313, 3482, 2304, 770, 
4278, 54, 3054, 189, + 4611, 3105, 3848, 4025, 5241, 1230, 1617, 1850, 355, 3597, + 4279, 4612, 3373, 111, 4280, 3716, 1350, 3160, 3483, 3055, + 4281, 2150, 3299, 3598, 5242, 2797, 4026, 4027, 3009, + 722, 2009, 5243, 1071, 247, 1207, 2343, 2478, 1378, 4613, + 2010, 864, 1437, 1214, 4614, 373, 3849, 1142, 2220, 667, + 4615, 442, 2763, 2563, 3850, 4028, 1969, 4282, 3300, 1840, + 837, 170, 1107, 934, 1336, 1883, 5244, 5245, 2119, 4283, + 2841, 743, 1569, 5246, 4616, 4284, 582, 2389, 1418, 3484, + 5247, 1803, 5248, 357, 1395, 1729, 3717, 3301, 2423, 1564, + 2241, 5249, 3106, 3851, 1633, 4617, 1114, 2086, 4285, + 1532, 5250, 482, 2451, 4618, 5251, 5252, 1492, 833, 1466, + 5253, 2726, 3599, 1641, 2842, 5254, 1526, 1272, 3718, + 4286, 1686, 1795, 416, 2564, 1903, 1954, 1804, 5255, 3852, + 2798, 3853, 1159, 2321, 5256, 2881, 4619, 1610, 1584, + 3056, 2424, 2764, 443, 3302, 1163, 3161, 5257, 5258, 4029, + 5259, 4287, 2506, 3057, 4620, 4030, 3162, 2104, 1647, + 3600, 2011, 1873, 4288, 5260, 4289, 431, 3485, 5261, 250, + 97, 81, 4290, 5262, 1648, 1851, 1558, 160, 848, 5263, + 866, 740, 1694, 5264, 2204, 2843, 3226, 4291, 4621, 3719, + 1687, 950, 2479, 426, 469, 3227, 3720, 3721, 4031, 5265, + 5266, 1188, 424, 1996, 861, 3601, 4292, 3854, 2205, 2694, + 168, 1235, 3602, 4293, 5267, 2087, 1674, 4622, 3374, 3303, + 220, 2565, 1009, 5268, 3855, 670, 3010, 332, 1208, 717, + 5269, 5270, 3603, 2452, 4032, 3375, 5271, 513, 5272, 1209, + 2882, 3376, 3163, 4623, 1080, 5273, 5274, 5275, 5276, + 2534, 3722, 3604, 815, 1587, 4033, 4034, 5277, 3605, 3486, + 3856, 1254, 4624, 1328, 3058, 1390, 4035, 1741, 4036, + 3857, 4037, 5278, 236, 3858, 2453, 3304, 5279, 5280, 3723, + 3859, 1273, 3860, 4625, 5281, 308, 5282, 4626, 245, 4627, + 1852, 2480, 1307, 2583, 430, 715, 2137, 2454, 5283, 270, + 199, 2883, 4038, 5284, 3606, 2727, 1753, 761, 1754, 725, + 1661, 1841, 4628, 3487, 3724, 5285, 5286, 587, 14, 3305, + 227, 2608, 326, 480, 2270, 943, 2765, 3607, 291, 650, + 1884, 5287, 1702, 1226, 102, 
1547, 62, 3488, 904, 4629, + 3489, 1164, 4294, 5288, 5289, 1224, 1548, 2766, 391, 498, + 1493, 5290, 1386, 1419, 5291, 2056, 1177, 4630, 813, 880, + 1081, 2368, 566, 1145, 4631, 2291, 1001, 1035, 2566, 2609, + 2242, 394, 1286, 5292, 5293, 2069, 5294, 86, 1494, 1730, + 4039, 491, 1588, 745, 897, 2963, 843, 3377, 4040, 2767, + 2884, 3306, 1768, 998, 2221, 2070, 397, 1827, 1195, 1970, + 3725, 3011, 3378, 284, 5295, 3861, 2507, 2138, 2120, 1904, + 5296, 4041, 2151, 4042, 4295, 1036, 3490, 1905, 114, 2567, + 4296, 209, 1527, 5297, 5298, 2964, 2844, 2635, 2390, 2728, + 3164, 812, 2568, 5299, 3307, 5300, 1559, 737, 1885, 3726, + 1210, 885, 28, 2695, 3608, 3862, 5301, 4297, 1004, 1780, + 4632, 5302, 346, 1982, 2222, 2696, 4633, 3863, 1742, 797, + 1642, 4043, 1934, 1072, 1384, 2152, 896, 4044, 3308, 3727, + 3228, 2885, 3609, 5303, 2569, 1959, 4634, 2455, 1786, + 5304, 5305, 5306, 4045, 4298, 1005, 1308, 3728, 4299, + 2729, 4635, 4636, 1528, 2610, 161, 1178, 4300, 1983, 987, + 4637, 1101, 4301, 631, 4046, 1157, 3229, 2425, 1343, 1241, + 1016, 2243, 2570, 372, 877, 2344, 2508, 1160, 555, 1935, + 911, 4047, 5307, 466, 1170, 169, 1051, 2921, 2697, 3729, + 2481, 3012, 1182, 2012, 2571, 1251, 2636, 5308, 992, 2345, + 3491, 1540, 2730, 1201, 2071, 2406, 1997, 2482, 5309, + 4638, 528, 1923, 2191, 1503, 1874, 1570, 2369, 3379, 3309, + 5310, 557, 1073, 5311, 1828, 3492, 2088, 2271, 3165, 3059, + 3107, 767, 3108, 2799, 4639, 1006, 4302, 4640, 2346, 1267, + 2179, 3730, 3230, 778, 4048, 3231, 2731, 1597, 2667, 5312, + 4641, 5313, 3493, 5314, 5315, 5316, 3310, 2698, 1433, + 3311, 131, 95, 1504, 4049, 723, 4303, 3166, 1842, 3610, + 2768, 2192, 4050, 2028, 2105, 3731, 5317, 3013, 4051, + 1218, 5318, 3380, 3232, 4052, 4304, 2584, 248, 1634, 3864, + 912, 5319, 2845, 3732, 3060, 3865, 654, 53, 5320, 3014, + 5321, 1688, 4642, 777, 3494, 1032, 4053, 1425, 5322, 191, + 820, 2121, 2846, 971, 4643, 931, 3233, 135, 664, 783, + 3866, 1998, 772, 2922, 1936, 4054, 3867, 4644, 2923, 3234, + 282, 2732, 
640, 1372, 3495, 1127, 922, 325, 3381, 5323, + 5324, 711, 2045, 5325, 5326, 4055, 2223, 2800, 1937, 4056, + 3382, 2224, 2255, 3868, 2305, 5327, 4645, 3869, 1258, + 3312, 4057, 3235, 2139, 2965, 4058, 4059, 5328, 2225, + 258, 3236, 4646, 101, 1227, 5329, 3313, 1755, 5330, 1391, + 3314, 5331, 2924, 2057, 893, 5332, 5333, 5334, 1402, 4305, + 2347, 5335, 5336, 3237, 3611, 5337, 5338, 878, 1325, 1781, + 2801, 4647, 259, 1385, 2585, 744, 1183, 2272, 4648, 5339, + 4060, 2509, 5340, 684, 1024, 4306, 5341, 472, 3612, 3496, + 1165, 3315, 4061, 4062, 322, 2153, 881, 455, 1695, 1152, + 1340, 660, 554, 2154, 4649, 1058, 4650, 4307, 830, 1065, + 3383, 4063, 4651, 1924, 5342, 1703, 1919, 5343, 932, 2273, + 122, 5344, 4652, 947, 677, 5345, 3870, 2637, 297, 1906, + 1925, 2274, 4653, 2322, 3316, 5346, 5347, 4308, 5348, + 4309, 84, 4310, 112, 989, 5349, 547, 1059, 4064, 701, + 3613, 1019, 5350, 4311, 5351, 3497, 942, 639, 457, 2306, + 2456, 993, 2966, 407, 851, 494, 4654, 3384, 927, 5352, + 1237, 5353, 2426, 3385, 573, 4312, 680, 921, 2925, 1279, + 1875, 285, 790, 1448, 1984, 719, 2168, 5354, 5355, 4655, + 4065, 4066, 1649, 5356, 1541, 563, 5357, 1077, 5358, 3386, + 3061, 3498, 511, 3015, 4067, 4068, 3733, 4069, 1268, 2572, + 3387, 3238, 4656, 4657, 5359, 535, 1048, 1276, 1189, 2926, + 2029, 3167, 1438, 1373, 2847, 2967, 1134, 2013, 5360, + 4313, 1238, 2586, 3109, 1259, 5361, 700, 5362, 2968, 3168, + 3734, 4314, 5363, 4315, 1146, 1876, 1907, 4658, 2611, + 4070, 781, 2427, 132, 1589, 203, 147, 273, 2802, 2407, + 898, 1787, 2155, 4071, 4072, 5364, 3871, 2803, 5365, 5366, + 4659, 4660, 5367, 3239, 5368, 1635, 3872, 965, 5369, 1805, + 2699, 1516, 3614, 1121, 1082, 1329, 3317, 4073, 1449, + 3873, 65, 1128, 2848, 2927, 2769, 1590, 3874, 5370, 5371, + 12, 2668, 45, 976, 2587, 3169, 4661, 517, 2535, 1013, + 1037, 3240, 5372, 3875, 2849, 5373, 3876, 5374, 3499, + 5375, 2612, 614, 1999, 2323, 3877, 3110, 2733, 2638, 5376, + 2588, 4316, 599, 1269, 5377, 1811, 3735, 5378, 2700, 3111, + 759, 
1060, 489, 1806, 3388, 3318, 1358, 5379, 5380, 2391, + 1387, 1215, 2639, 2256, 490, 5381, 5382, 4317, 1759, 2392, + 2348, 5383, 4662, 3878, 1908, 4074, 2640, 1807, 3241, + 4663, 3500, 3319, 2770, 2349, 874, 5384, 5385, 3501, 3736, + 1859, 91, 2928, 3737, 3062, 3879, 4664, 5386, 3170, 4075, + 2669, 5387, 3502, 1202, 1403, 3880, 2969, 2536, 1517, + 2510, 4665, 3503, 2511, 5388, 4666, 5389, 2701, 1886, + 1495, 1731, 4076, 2370, 4667, 5390, 2030, 5391, 5392, + 4077, 2702, 1216, 237, 2589, 4318, 2324, 4078, 3881, 4668, + 4669, 2703, 3615, 3504, 445, 4670, 5393, 5394, 5395, 5396, + 2771, 61, 4079, 3738, 1823, 4080, 5397, 687, 2046, 935, + 925, 405, 2670, 703, 1096, 1860, 2734, 4671, 4081, 1877, + 1367, 2704, 3389, 918, 2106, 1782, 2483, 334, 3320, 1611, + 1093, 4672, 564, 3171, 3505, 3739, 3390, 945, 2641, 2058, + 4673, 5398, 1926, 872, 4319, 5399, 3506, 2705, 3112, 349, + 4320, 3740, 4082, 4674, 3882, 4321, 3741, 2156, 4083, + 4675, 4676, 4322, 4677, 2408, 2047, 782, 4084, 400, 251, + 4323, 1624, 5400, 5401, 277, 3742, 299, 1265, 476, 1191, + 3883, 2122, 4324, 4325, 1109, 205, 5402, 2590, 1000, 2157, + 3616, 1861, 5403, 5404, 5405, 4678, 5406, 4679, 2573, + 107, 2484, 2158, 4085, 3507, 3172, 5407, 1533, 541, 1301, + 158, 753, 4326, 2886, 3617, 5408, 1696, 370, 1088, 4327, + 4680, 3618, 579, 327, 440, 162, 2244, 269, 1938, 1374, + 3508, 968, 3063, 56, 1396, 3113, 2107, 3321, 3391, 5409, + 1927, 2159, 4681, 3016, 5410, 3619, 5411, 5412, 3743, + 4682, 2485, 5413, 2804, 5414, 1650, 4683, 5415, 2613, + 5416, 5417, 4086, 2671, 3392, 1149, 3393, 4087, 3884, + 4088, 5418, 1076, 49, 5419, 951, 3242, 3322, 3323, 450, + 2850, 920, 5420, 1812, 2805, 2371, 4328, 1909, 1138, 2372, + 3885, 3509, 5421, 3243, 4684, 1910, 1147, 1518, 2428, + 4685, 3886, 5422, 4686, 2393, 2614, 260, 1796, 3244, 5423, + 5424, 3887, 3324, 708, 5425, 3620, 1704, 5426, 3621, 1351, + 1618, 3394, 3017, 1887, 944, 4329, 3395, 4330, 3064, 3396, + 4331, 5427, 3744, 422, 413, 1714, 3325, 500, 2059, 2350, + 4332, 
2486, 5428, 1344, 1911, 954, 5429, 1668, 5430, 5431, + 4089, 2409, 4333, 3622, 3888, 4334, 5432, 2307, 1318, + 2512, 3114, 133, 3115, 2887, 4687, 629, 31, 2851, 2706, + 3889, 4688, 850, 949, 4689, 4090, 2970, 1732, 2089, 4335, + 1496, 1853, 5433, 4091, 620, 3245, 981, 1242, 3745, 3397, + 1619, 3746, 1643, 3326, 2140, 2457, 1971, 1719, 3510, + 2169, 5434, 3246, 5435, 5436, 3398, 1829, 5437, 1277, + 4690, 1565, 2048, 5438, 1636, 3623, 3116, 5439, 869, 2852, + 655, 3890, 3891, 3117, 4092, 3018, 3892, 1310, 3624, 4691, + 5440, 5441, 5442, 1733, 558, 4692, 3747, 335, 1549, 3065, + 1756, 4336, 3748, 1946, 3511, 1830, 1291, 1192, 470, 2735, + 2108, 2806, 913, 1054, 4093, 5443, 1027, 5444, 3066, 4094, + 4693, 982, 2672, 3399, 3173, 3512, 3247, 3248, 1947, 2807, + 5445, 571, 4694, 5446, 1831, 5447, 3625, 2591, 1523, 2429, + 5448, 2090, 984, 4695, 3749, 1960, 5449, 3750, 852, 923, + 2808, 3513, 3751, 969, 1519, 999, 2049, 2325, 1705, 5450, + 3118, 615, 1662, 151, 597, 4095, 2410, 2326, 1049, 275, + 4696, 3752, 4337, 568, 3753, 3626, 2487, 4338, 3754, 5451, + 2430, 2275, 409, 3249, 5452, 1566, 2888, 3514, 1002, 769, + 2853, 194, 2091, 3174, 3755, 2226, 3327, 4339, 628, 1505, + 5453, 5454, 1763, 2180, 3019, 4096, 521, 1161, 2592, 1788, + 2206, 2411, 4697, 4097, 1625, 4340, 4341, 412, 42, 3119, + 464, 5455, 2642, 4698, 3400, 1760, 1571, 2889, 3515, 2537, + 1219, 2207, 3893, 2643, 2141, 2373, 4699, 4700, 3328, + 1651, 3401, 3627, 5456, 5457, 3628, 2488, 3516, 5458, + 3756, 5459, 5460, 2276, 2092, 460, 5461, 4701, 5462, 3020, + 962, 588, 3629, 289, 3250, 2644, 1116, 52, 5463, 3067, + 1797, 5464, 5465, 5466, 1467, 5467, 1598, 1143, 3757, + 4342, 1985, 1734, 1067, 4702, 1280, 3402, 465, 4703, 1572, + 510, 5468, 1928, 2245, 1813, 1644, 3630, 5469, 4704, 3758, + 5470, 5471, 2673, 1573, 1534, 5472, 5473, 536, 1808, 1761, + 3517, 3894, 3175, 2645, 5474, 5475, 5476, 4705, 3518, + 2929, 1912, 2809, 5477, 3329, 1122, 377, 3251, 5478, 360, + 5479, 5480, 4343, 1529, 551, 5481, 2060, 3759, 
1769, 2431, + 5482, 2930, 4344, 3330, 3120, 2327, 2109, 2031, 4706, + 1404, 136, 1468, 1479, 672, 1171, 3252, 2308, 271, 3176, + 5483, 2772, 5484, 2050, 678, 2736, 865, 1948, 4707, 5485, + 2014, 4098, 2971, 5486, 2737, 2227, 1397, 3068, 3760, + 4708, 4709, 1735, 2931, 3403, 3631, 5487, 3895, 509, 2854, + 2458, 2890, 3896, 5488, 5489, 3177, 3178, 4710, 4345, + 2538, 4711, 2309, 1166, 1010, 552, 681, 1888, 5490, 5491, + 2972, 2973, 4099, 1287, 1596, 1862, 3179, 358, 453, 736, + 175, 478, 1117, 905, 1167, 1097, 5492, 1854, 1530, 5493, + 1706, 5494, 2181, 3519, 2292, 3761, 3520, 3632, 4346, + 2093, 4347, 5495, 3404, 1193, 2489, 4348, 1458, 2193, + 2208, 1863, 1889, 1421, 3331, 2932, 3069, 2182, 3521, + 595, 2123, 5496, 4100, 5497, 5498, 4349, 1707, 2646, 223, + 3762, 1359, 751, 3121, 183, 3522, 5499, 2810, 3021, 419, + 2374, 633, 704, 3897, 2394, 241, 5500, 5501, 5502, 838, + 3022, 3763, 2277, 2773, 2459, 3898, 1939, 2051, 4101, + 1309, 3122, 2246, 1181, 5503, 1136, 2209, 3899, 2375, + 1446, 4350, 2310, 4712, 5504, 5505, 4351, 1055, 2615, + 484, 3764, 5506, 4102, 625, 4352, 2278, 3405, 1499, 4353, + 4103, 5507, 4104, 4354, 3253, 2279, 2280, 3523, 5508, + 5509, 2774, 808, 2616, 3765, 3406, 4105, 4355, 3123, 2539, + 526, 3407, 3900, 4356, 955, 5510, 1620, 4357, 2647, 2432, + 5511, 1429, 3766, 1669, 1832, 994, 928, 5512, 3633, 1260, + 5513, 5514, 5515, 1949, 2293, 741, 2933, 1626, 4358, 2738, + 2460, 867, 1184, 362, 3408, 1392, 5516, 5517, 4106, 4359, + 1770, 1736, 3254, 2934, 4713, 4714, 1929, 2707, 1459, + 1158, 5518, 3070, 3409, 2891, 1292, 1930, 2513, 2855, + 3767, 1986, 1187, 2072, 2015, 2617, 4360, 5519, 2574, + 2514, 2170, 3768, 2490, 3332, 5520, 3769, 4715, 5521, + 5522, 666, 1003, 3023, 1022, 3634, 4361, 5523, 4716, 1814, + 2257, 574, 3901, 1603, 295, 1535, 705, 3902, 4362, 283, + 858, 417, 5524, 5525, 3255, 4717, 4718, 3071, 1220, 1890, + 1046, 2281, 2461, 4107, 1393, 1599, 689, 2575, 388, 4363, + 5526, 2491, 802, 5527, 2811, 3903, 2061, 1405, 2258, 5528, + 
4719, 3904, 2110, 1052, 1345, 3256, 1585, 5529, 809, 5530, + 5531, 5532, 575, 2739, 3524, 956, 1552, 1469, 1144, 2328, + 5533, 2329, 1560, 2462, 3635, 3257, 4108, 616, 2210, 4364, + 3180, 2183, 2294, 5534, 1833, 5535, 3525, 4720, 5536, + 1319, 3770, 3771, 1211, 3636, 1023, 3258, 1293, 2812, + 5537, 5538, 5539, 3905, 607, 2311, 3906, 762, 2892, 1439, + 4365, 1360, 4721, 1485, 3072, 5540, 4722, 1038, 4366, + 1450, 2062, 2648, 4367, 1379, 4723, 2593, 5541, 5542, + 4368, 1352, 1414, 2330, 2935, 1172, 5543, 5544, 3907, + 3908, 4724, 1798, 1451, 5545, 5546, 5547, 5548, 2936, + 4109, 4110, 2492, 2351, 411, 4111, 4112, 3637, 3333, 3124, + 4725, 1561, 2674, 1452, 4113, 1375, 5549, 5550, 47, 2974, + 316, 5551, 1406, 1591, 2937, 3181, 5552, 1025, 2142, 3125, + 3182, 354, 2740, 884, 2228, 4369, 2412, 508, 3772, 726, + 3638, 996, 2433, 3639, 729, 5553, 392, 2194, 1453, 4114, + 4726, 3773, 5554, 5555, 2463, 3640, 2618, 1675, 2813, + 919, 2352, 2975, 2353, 1270, 4727, 4115, 73, 5556, 5557, + 647, 5558, 3259, 2856, 2259, 1550, 1346, 3024, 5559, 1332, + 883, 3526, 5560, 5561, 5562, 5563, 3334, 2775, 5564, 1212, + 831, 1347, 4370, 4728, 2331, 3909, 1864, 3073, 720, 3910, + 4729, 4730, 3911, 5565, 4371, 5566, 5567, 4731, 5568, + 5569, 1799, 4732, 3774, 2619, 4733, 3641, 1645, 2376, + 4734, 5570, 2938, 669, 2211, 2675, 2434, 5571, 2893, 5572, + 5573, 1028, 3260, 5574, 4372, 2413, 5575, 2260, 1353, + 5576, 5577, 4735, 3183, 518, 5578, 4116, 5579, 4373, 1961, + 5580, 2143, 4374, 5581, 5582, 3025, 2354, 2355, 3912, + 516, 1834, 1454, 4117, 2708, 4375, 4736, 2229, 2620, 1972, + 1129, 3642, 5583, 2776, 5584, 2976, 1422, 577, 1470, 3026, + 1524, 3410, 5585, 5586, 432, 4376, 3074, 3527, 5587, 2594, + 1455, 2515, 2230, 1973, 1175, 5588, 1020, 2741, 4118, + 3528, 4737, 5589, 2742, 5590, 1743, 1361, 3075, 3529, + 2649, 4119, 4377, 4738, 2295, 895, 924, 4378, 2171, 331, + 2247, 3076, 166, 1627, 3077, 1098, 5591, 1232, 2894, 2231, + 3411, 4739, 657, 403, 1196, 2377, 542, 3775, 3412, 1600, + 4379, 
3530, 5592, 4740, 2777, 3261, 576, 530, 1362, 4741, + 4742, 2540, 2676, 3776, 4120, 5593, 842, 3913, 5594, 2814, + 2032, 1014, 4121, 213, 2709, 3413, 665, 621, 4380, 5595, + 3777, 2939, 2435, 5596, 2436, 3335, 3643, 3414, 4743, + 4381, 2541, 4382, 4744, 3644, 1682, 4383, 3531, 1380, + 5597, 724, 2282, 600, 1670, 5598, 1337, 1233, 4745, 3126, + 2248, 5599, 1621, 4746, 5600, 651, 4384, 5601, 1612, 4385, + 2621, 5602, 2857, 5603, 2743, 2312, 3078, 5604, 716, 2464, + 3079, 174, 1255, 2710, 4122, 3645, 548, 1320, 1398, 728, + 4123, 1574, 5605, 1891, 1197, 3080, 4124, 5606, 3081, + 3082, 3778, 3646, 3779, 747, 5607, 635, 4386, 4747, 5608, + 5609, 5610, 4387, 5611, 5612, 4748, 5613, 3415, 4749, + 2437, 451, 5614, 3780, 2542, 2073, 4388, 2744, 4389, 4125, + 5615, 1764, 4750, 5616, 4390, 350, 4751, 2283, 2395, 2493, + 5617, 4391, 4126, 2249, 1434, 4127, 488, 4752, 458, 4392, + 4128, 3781, 771, 1330, 2396, 3914, 2576, 3184, 2160, 2414, + 1553, 2677, 3185, 4393, 5618, 2494, 2895, 2622, 1720, + 2711, 4394, 3416, 4753, 5619, 2543, 4395, 5620, 3262, + 4396, 2778, 5621, 2016, 2745, 5622, 1155, 1017, 3782, + 3915, 5623, 3336, 2313, 201, 1865, 4397, 1430, 5624, 4129, + 5625, 5626, 5627, 5628, 5629, 4398, 1604, 5630, 414, 1866, + 371, 2595, 4754, 4755, 3532, 2017, 3127, 4756, 1708, 960, + 4399, 887, 389, 2172, 1536, 1663, 1721, 5631, 2232, 4130, + 2356, 2940, 1580, 5632, 5633, 1744, 4757, 2544, 4758, + 4759, 5634, 4760, 5635, 2074, 5636, 4761, 3647, 3417, + 2896, 4400, 5637, 4401, 2650, 3418, 2815, 673, 2712, 2465, + 709, 3533, 4131, 3648, 4402, 5638, 1148, 502, 634, 5639, + 5640, 1204, 4762, 3649, 1575, 4763, 2623, 3783, 5641, + 3784, 3128, 948, 3263, 121, 1745, 3916, 1110, 5642, 4403, + 3083, 2516, 3027, 4132, 3785, 1151, 1771, 3917, 1488, + 4133, 1987, 5643, 2438, 3534, 5644, 5645, 2094, 5646, + 4404, 3918, 1213, 1407, 2816, 531, 2746, 2545, 3264, 1011, + 1537, 4764, 2779, 4405, 3129, 1061, 5647, 3786, 3787, + 1867, 2897, 5648, 2018, 120, 4406, 4407, 2063, 3650, 3265, + 2314, 
3919, 2678, 3419, 1955, 4765, 4134, 5649, 3535, + 1047, 2713, 1266, 5650, 1368, 4766, 2858, 649, 3420, 3920, + 2546, 2747, 1102, 2859, 2679, 5651, 5652, 2000, 5653, + 1111, 3651, 2977, 5654, 2495, 3921, 3652, 2817, 1855, + 3421, 3788, 5655, 5656, 3422, 2415, 2898, 3337, 3266, + 3653, 5657, 2577, 5658, 3654, 2818, 4135, 1460, 856, 5659, + 3655, 5660, 2899, 2978, 5661, 2900, 3922, 5662, 4408, + 632, 2517, 875, 3923, 1697, 3924, 2296, 5663, 5664, 4767, + 3028, 1239, 580, 4768, 4409, 5665, 914, 936, 2075, 1190, + 4136, 1039, 2124, 5666, 5667, 5668, 5669, 3423, 1473, + 5670, 1354, 4410, 3925, 4769, 2173, 3084, 4137, 915, 3338, + 4411, 4412, 3339, 1605, 1835, 5671, 2748, 398, 3656, 4413, + 3926, 4138, 328, 1913, 2860, 4139, 3927, 1331, 4414, 3029, + 937, 4415, 5672, 3657, 4140, 4141, 3424, 2161, 4770, 3425, + 524, 742, 538, 3085, 1012, 5673, 5674, 3928, 2466, 5675, + 658, 1103, 225, 3929, 5676, 5677, 4771, 5678, 4772, 5679, + 3267, 1243, 5680, 4142, 963, 2250, 4773, 5681, 2714, 3658, + 3186, 5682, 5683, 2596, 2332, 5684, 4774, 5685, 5686, + 5687, 3536, 957, 3426, 2547, 2033, 1931, 2941, 2467, 870, + 2019, 3659, 1746, 2780, 2781, 2439, 2468, 5688, 3930, + 5689, 3789, 3130, 3790, 3537, 3427, 3791, 5690, 1179, + 3086, 5691, 3187, 2378, 4416, 3792, 2548, 3188, 3131, + 2749, 4143, 5692, 3428, 1556, 2549, 2297, 977, 2901, 2034, + 4144, 1205, 3429, 5693, 1765, 3430, 3189, 2125, 1271, + 714, 1689, 4775, 3538, 5694, 2333, 3931, 533, 4417, 3660, + 2184, 617, 5695, 2469, 3340, 3539, 2315, 5696, 5697, 3190, + 5698, 5699, 3932, 1988, 618, 427, 2651, 3540, 3431, 5700, + 5701, 1244, 1690, 5702, 2819, 4418, 4776, 5703, 3541, + 4777, 5704, 2284, 1576, 473, 3661, 4419, 3432, 972, 5705, + 3662, 5706, 3087, 5707, 5708, 4778, 4779, 5709, 3793, + 4145, 4146, 5710, 153, 4780, 356, 5711, 1892, 2902, 4420, + 2144, 408, 803, 2357, 5712, 3933, 5713, 4421, 1646, 2578, + 2518, 4781, 4782, 3934, 5714, 3935, 4422, 5715, 2416, + 3433, 752, 5716, 5717, 1962, 3341, 2979, 5718, 746, 3030, + 2470, 4783, 
4423, 3794, 698, 4784, 1893, 4424, 3663, 2550, + 4785, 3664, 3936, 5719, 3191, 3434, 5720, 1824, 1302, + 4147, 2715, 3937, 1974, 4425, 5721, 4426, 3192, 823, 1303, + 1288, 1236, 2861, 3542, 4148, 3435, 774, 3938, 5722, 1581, + 4786, 1304, 2862, 3939, 4787, 5723, 2440, 2162, 1083, + 3268, 4427, 4149, 4428, 344, 1173, 288, 2316, 454, 1683, + 5724, 5725, 1461, 4788, 4150, 2597, 5726, 5727, 4789, + 985, 894, 5728, 3436, 3193, 5729, 1914, 2942, 3795, 1989, + 5730, 2111, 1975, 5731, 4151, 5732, 2579, 1194, 425, 5733, + 4790, 3194, 1245, 3796, 4429, 5734, 5735, 2863, 5736, + 636, 4791, 1856, 3940, 760, 1800, 5737, 4430, 2212, 1508, + 4792, 4152, 1894, 1684, 2298, 5738, 5739, 4793, 4431, + 4432, 2213, 479, 5740, 5741, 832, 5742, 4153, 2496, 5743, + 2980, 2497, 3797, 990, 3132, 627, 1815, 2652, 4433, 1582, + 4434, 2126, 2112, 3543, 4794, 5744, 799, 4435, 3195, 5745, + 4795, 2113, 1737, 3031, 1018, 543, 754, 4436, 3342, 1676, + 4796, 4797, 4154, 4798, 1489, 5746, 3544, 5747, 2624, + 2903, 4155, 5748, 5749, 2981, 5750, 5751, 5752, 5753, + 3196, 4799, 4800, 2185, 1722, 5754, 3269, 3270, 1843, + 3665, 1715, 481, 365, 1976, 1857, 5755, 5756, 1963, 2498, + 4801, 5757, 2127, 3666, 3271, 433, 1895, 2064, 2076, 5758, + 602, 2750, 5759, 5760, 5761, 5762, 5763, 3032, 1628, 3437, + 5764, 3197, 4802, 4156, 2904, 4803, 2519, 5765, 2551, + 2782, 5766, 5767, 5768, 3343, 4804, 2905, 5769, 4805, + 5770, 2864, 4806, 4807, 1221, 2982, 4157, 2520, 5771, + 5772, 5773, 1868, 1990, 5774, 5775, 5776, 1896, 5777, + 5778, 4808, 1897, 4158, 318, 5779, 2095, 4159, 4437, 5780, + 5781, 485, 5782, 938, 3941, 553, 2680, 116, 5783, 3942, + 3667, 5784, 3545, 2681, 2783, 3438, 3344, 2820, 5785, + 3668, 2943, 4160, 1747, 2944, 2983, 5786, 5787, 207, 5788, + 4809, 5789, 4810, 2521, 5790, 3033, 890, 3669, 3943, 5791, + 1878, 3798, 3439, 5792, 2186, 2358, 3440, 1652, 5793, + 5794, 5795, 941, 2299, 208, 3546, 4161, 2020, 330, 4438, + 3944, 2906, 2499, 3799, 4439, 4811, 5796, 5797, 5798) diff --git 
class Big5Prober(MultiByteCharSetProber):
    """Charset prober for Big5 (Traditional Chinese).

    Wires the generic multi-byte prober driver to the Big5-specific
    coding state machine and character-distribution analyser.
    """

    def __init__(self):
        super().__init__()
        # Validates byte sequences against the Big5 encoding structure.
        self.coding_sm = CodingStateMachine(BIG5_SM_MODEL)
        # Scores how "typical" the observed character frequencies are for Big5.
        self.distribution_analyzer = Big5DistributionAnalysis()
        self.reset()

    @property
    def charset_name(self):
        """Canonical name of the charset this prober detects."""
        return "Big5"

    @property
    def language(self):
        """Primary language associated with this charset."""
        return "Chinese"
class EUCTWDistributionAnalysis(CharDistributionAnalysis):
    """Character-distribution analyser for EUC-TW (Traditional Chinese)."""

    def __init__(self):
        super(EUCTWDistributionAnalysis, self).__init__()
        self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER
        self._table_size = EUCTW_TABLE_SIZE
        self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, byte_str):
        """Map a two-byte EUC-TW sequence to its frequency-table index.

        EUC-TW rows begin at lead byte 0xC4 (196); anything below that is
        out of range and yields -1.
        """
        lead = byte_str[0]
        if lead < 196:
            return -1
        return 94 * (lead - 196) + byte_str[1] - 161
class Big5DistributionAnalysis(CharDistributionAnalysis):
    """Character-distribution analyser for Big5 (Traditional Chinese)."""

    def __init__(self):
        super(Big5DistributionAnalysis, self).__init__()
        self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER
        self._table_size = BIG5_TABLE_SIZE
        self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, byte_str):
        """Map a two-byte Big5 sequence to its frequency-table index.

        Big5 rows start at lead byte 0xA4 (164); trail bytes occupy two
        disjoint ranges, hence the extra +63 offset when the trail byte is
        in the upper range (>= 0xA1).
        """
        lead, trail = byte_str[0], byte_str[1]
        if lead < 164:
            return -1
        row = 157 * (lead - 164)
        if trail >= 161:
            return row + trail - 161 + 63
        return row + trail - 64
class CharSetGroupProber(CharSetProber):
    """Runs a group of child probers over the same input and reports the
    best guess among them.

    Children are (re)activated in reset(); a child that reaches NOT_ME is
    deactivated, and when every child has given up the group gives up too.
    """

    def __init__(self, lang_filter=None):
        super(CharSetGroupProber, self).__init__(lang_filter=lang_filter)
        self._active_num = 0           # number of probers still in the running
        self.probers = []              # populated by concrete subclasses
        self._best_guess_prober = None

    def reset(self):
        super(CharSetGroupProber, self).reset()
        self._active_num = 0
        for prober in self.probers:
            if prober:
                prober.reset()
                prober.active = True
                self._active_num += 1
        self._best_guess_prober = None

    @property
    def charset_name(self):
        # Lazily compute the best guess if feed() never settled on one.
        if not self._best_guess_prober:
            self.get_confidence()
            if not self._best_guess_prober:
                return None
        return self._best_guess_prober.charset_name

    @property
    def language(self):
        if not self._best_guess_prober:
            self.get_confidence()
            if not self._best_guess_prober:
                return None
        return self._best_guess_prober.language

    def feed(self, byte_str):
        for prober in self.probers:
            if not prober or not prober.active:
                continue
            state = prober.feed(byte_str)
            if not state:
                continue
            if state == ProbingState.FOUND_IT:
                # One child is certain: the whole group is done.
                self._best_guess_prober = prober
                self._state = ProbingState.FOUND_IT
                return self.state
            if state == ProbingState.NOT_ME:
                prober.active = False
                self._active_num -= 1
                if self._active_num <= 0:
                    self._state = ProbingState.NOT_ME
                    return self.state
        return self.state

    def get_confidence(self):
        """Return the best confidence among the still-active child probers.

        Also records that child in ``self._best_guess_prober`` for the
        charset_name/language properties.
        """
        state = self.state
        if state == ProbingState.FOUND_IT:
            return 0.99
        if state == ProbingState.NOT_ME:
            return 0.01
        best_conf = 0.0
        self._best_guess_prober = None
        for prober in self.probers:
            if not prober:
                continue
            if not prober.active:
                self.logger.debug("%s not active", prober.charset_name)
                continue
            conf = prober.get_confidence()
            self.logger.debug("%s %s confidence = %s", prober.charset_name, prober.language, conf)
            if best_conf < conf:
                best_conf = conf
                self._best_guess_prober = prober
        # BUG FIX: the decompiled code ended with
        #     return self._best_guess_prober or 0.0
        #     return best_conf
        # i.e. it returned a prober *object* instead of a float and left the
        # real return unreachable.  Restored the upstream chardet behaviour.
        if self._best_guess_prober is None:
            return 0.0
        return best_conf
reset(self): + self._state = ProbingState.DETECTING + + @property + def charset_name(self): + pass + + def feed(self, buf): + pass + + @property + def state(self): + return self._state + + def get_confidence(self): + return 0.0 + + @staticmethod + def filter_high_byte_only(buf): + buf = re.sub(b'([\x00-\x7f])+', b' ', buf) + return buf + + @staticmethod + def filter_international_words(buf): + """ + We define three types of bytes: + alphabet: english alphabets [a-zA-Z] + international: international characters [\x80-ÿ] + marker: everything else [^a-zA-Z\x80-ÿ] + + The input buffer can be thought to contain a series of words delimited + by markers. This function works to filter all words that contain at + least one international character. All contiguous sequences of markers + are replaced by a single space ascii character. + + This filter applies to all scripts which do not use English characters. + """ + filtered = bytearray() + words = re.findall(b'[a-zA-Z]*[\x80-\xff]+[a-zA-Z]*[^a-zA-Z\x80-\xff]?', buf) + for word in words: + filtered.extend(word[None[:-1]]) + last_char = word[(-1)[:None]] + if not last_char.isalpha(): + if last_char < b'\x80': + last_char = b' ' + filtered.extend(last_char) + + return filtered + + @staticmethod + def filter_with_english_letters(buf): + """ + Returns a copy of ``buf`` that retains only the sequences of English + alphabet and high byte characters that are not between <> characters. + Also retains English alphabet and high byte characters immediately + before occurrences of >. + + This filter can be applied to all scripts which contain both English + characters and extended ASCII characters, but is currently only used by + ``Latin1Prober``. 
+ """ + filtered = bytearray() + in_tag = False + prev = 0 + for curr in range(len(buf)): + buf_char = buf[curr[:curr + 1]] + if buf_char == b'>': + in_tag = False + else: + if buf_char == b'<': + in_tag = True + if buf_char < b'\x80': + if buf_char.isalpha() or curr > prev: + if not in_tag: + filtered.extend(buf[prev[:curr]]) + filtered.extend(b' ') + prev = curr + 1 + + if not in_tag: + filtered.extend(buf[prev[:None]]) + return filtered diff --git a/APPS_UNCOMPILED/lib/chardet/cli/__init__.py b/APPS_UNCOMPILED/lib/chardet/cli/__init__.py new file mode 100644 index 0000000..30e35e7 --- /dev/null +++ b/APPS_UNCOMPILED/lib/chardet/cli/__init__.py @@ -0,0 +1,8 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/cli/__init__.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 1 bytes +pass diff --git a/APPS_UNCOMPILED/lib/chardet/cli/chardetect.py b/APPS_UNCOMPILED/lib/chardet/cli/chardetect.py new file mode 100644 index 0000000..dea70c2 --- /dev/null +++ b/APPS_UNCOMPILED/lib/chardet/cli/chardetect.py @@ -0,0 +1,77 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/cli/chardetect.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 2711 bytes +""" +Script which takes one or more file paths and reports on their detected +encodings + +Example:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +If no paths are provided, it takes its input from stdin. 
def description_of(lines, name='stdin'):
    """
    Return a string describing the probable encoding of a file or
    list of strings.

    :param lines: The lines to get the encoding of.
    :type lines: Iterable of bytes
    :param name: Name of file or collection of lines
    :type name: str
    """
    detector = UniversalDetector()
    for raw_line in lines:
        detector.feed(bytearray(raw_line))
        if detector.done:
            break
    detector.close()
    result = detector.result
    if PY2:
        # On Python 2 file names may arrive as bytes; normalise for display.
        name = name.decode(sys.getfilesystemencoding(), "ignore")
    if not result["encoding"]:
        return "{}: no result".format(name)
    return "{}: {} with confidence {}".format(name, result["encoding"], result["confidence"])
class CodingStateMachine(object):
    """
    A state machine to verify a byte sequence for a particular encoding. For
    each byte the detector receives, it will feed that byte to every active
    state machine available, one byte at a time. The state machine changes its
    state based on its previous state and the byte it receives. There are 3
    states in a state machine that are of interest to an auto-detector:

    START state: the starting state, or a legal byte sequence (i.e. a valid
                 code point) has just been completed.

    ME state: the machine identified a byte sequence that is specific to this
              charset and to no other - an immediate positive answer.

    ERROR state: the machine identified an illegal byte sequence for this
                 encoding - an immediate negative answer, and the detector
                 excludes this encoding from consideration from here on.
    """

    def __init__(self, sm):
        # ``sm`` is a model dict with keys: class_table, class_factor,
        # state_table, char_len_table, name, language.
        self._model = sm
        self._curr_byte_pos = 0
        self._curr_char_len = 0
        self._curr_state = None
        self.logger = logging.getLogger(__name__)
        self.reset()

    def reset(self):
        self._curr_state = MachineState.START

    def next_state(self, c):
        """Feed one byte and return the machine's new state."""
        # Classify the byte, then look up the transition for the pair
        # (current state, byte class).
        byte_class = self._model['class_table'][c]
        if self._curr_state == MachineState.START:
            # A new character starts here; remember its expected length.
            self._curr_byte_pos = 0
            self._curr_char_len = self._model['char_len_table'][byte_class]
        transition = self._curr_state * self._model['class_factor'] + byte_class
        self._curr_state = self._model['state_table'][transition]
        self._curr_byte_pos += 1
        return self._curr_state

    def get_current_charlen(self):
        return self._curr_char_len

    def get_coding_state_machine(self):
        return self._model['name']

    @property
    def language(self):
        return self._model['language']
class CP949Prober(MultiByteCharSetProber):
    """Prober for the CP949 (extended Korean) encoding.

    A CP949-specific state machine validates byte sequences, while the
    EUC-KR distribution analyser is reused for scoring (CP949 is a
    superset of EUC-KR).
    """

    def __init__(self):
        super(CP949Prober, self).__init__()
        self.coding_sm = CodingStateMachine(CP949_SM_MODEL)
        # Deliberate reuse: EUC-KR's frequency distribution covers CP949.
        self.distribution_analyzer = EUCKRDistributionAnalysis()
        self.reset()

    @property
    def language(self):
        return "Korean"

    @property
    def charset_name(self):
        return "CP949"
class InputState(object):
    """The different states a universal detector can be in."""
    PURE_ASCII = 0
    ESC_ASCII = 1
    HIGH_BYTE = 2


class LanguageFilter(object):
    """Bit flags selecting which languages a ``UniversalDetector`` probes."""
    CHINESE_SIMPLIFIED = 1
    CHINESE_TRADITIONAL = 2
    JAPANESE = 4
    KOREAN = 8
    NON_CJK = 16
    ALL = 31
    CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL
    CJK = CHINESE | JAPANESE | KOREAN


class ProbingState(object):
    """The different states a prober can be in."""
    DETECTING = 0
    FOUND_IT = 1
    NOT_ME = 2


class MachineState(object):
    """The different states a coding state machine can be in."""
    START = 0
    ERROR = 1
    ITS_ME = 2


class SequenceLikelihood(object):
    """Likelihood of a character following the previous one."""
    NEGATIVE = 0
    UNLIKELY = 1
    LIKELY = 2
    POSITIVE = 3

    @classmethod
    def get_num_categories(cls):
        """:returns: The number of likelihood categories in the enum."""
        return 4


class CharacterCategory(object):
    """Categories language models for ``SingleByteCharsetProber`` put
    characters into.

    Anything less than CONTROL is considered a letter.
    """
    UNDEFINED = 255
    LINE_BREAK = 254
    SYMBOL = 253
    DIGIT = 252
    CONTROL = 251
class EscCharSetProber(CharSetProber):
    """
    This CharSetProber uses a "code scheme" approach for detecting encodings,
    whereby easily recognizable escape or shift sequences are relied on to
    identify these encodings.
    """

    def __init__(self, lang_filter=None):
        super(EscCharSetProber, self).__init__(lang_filter=lang_filter)
        # One state machine per escape-encoded charset we may look for,
        # selected by the language filter.
        self.coding_sm = []
        if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED:
            self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL))
            self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL))
        if self.lang_filter & LanguageFilter.JAPANESE:
            self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL))
        if self.lang_filter & LanguageFilter.KOREAN:
            self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL))
        self.active_sm_count = None
        self._detected_charset = None
        self._detected_language = None
        self._state = None
        self.reset()

    def reset(self):
        super(EscCharSetProber, self).reset()
        for coding_sm in self.coding_sm:
            if not coding_sm:
                continue
            coding_sm.active = True
            coding_sm.reset()
        self.active_sm_count = len(self.coding_sm)
        self._detected_charset = None
        self._detected_language = None

    @property
    def charset_name(self):
        return self._detected_charset

    @property
    def language(self):
        return self._detected_language

    def get_confidence(self):
        # An escape-scheme match is essentially unambiguous.
        return 0.99 if self._detected_charset else 0.0

    def feed(self, byte_str):
        for c in byte_str:
            for coding_sm in self.coding_sm:
                if not coding_sm or not coding_sm.active:
                    continue
                coding_state = coding_sm.next_state(c)
                if coding_state == MachineState.ERROR:
                    # This machine can no longer match; retire it.
                    coding_sm.active = False
                    self.active_sm_count -= 1
                    if self.active_sm_count <= 0:
                        self._state = ProbingState.NOT_ME
                        return self.state
                elif coding_state == MachineState.ITS_ME:
                    self._state = ProbingState.FOUND_IT
                    self._detected_charset = coding_sm.get_coding_state_machine()
                    self._detected_language = coding_sm.language
                    return self.state
        return self.state
# State-machine model for the HZ-GB-2312 escape scheme.
# BUG FIX: the decompiled constants carried literal embedded quotes
# ('"HZ-GB-2312"' / '"Chinese"'), which would leak into the reported
# charset name; upstream chardet uses the plain strings restored here.
HZ_SM_MODEL = {
    'class_table': HZ_CLS,
    'class_factor': 6,
    'state_table': HZ_ST,
    'char_len_table': HZ_CHAR_LEN_TABLE,
    'name': 'HZ-GB-2312',
    'language': 'Chinese'}
# State-machine model for the ISO-2022-CN escape scheme.
# BUG FIX: the decompiled constants carried literal embedded quotes
# ('"ISO-2022-CN"' / '"Chinese"'); upstream chardet uses the plain
# strings restored here.
ISO2022CN_SM_MODEL = {
    'class_table': ISO2022CN_CLS,
    'class_factor': 9,
    'state_table': ISO2022CN_ST,
    'char_len_table': ISO2022CN_CHAR_LEN_TABLE,
    'name': 'ISO-2022-CN',
    'language': 'Chinese'}
# State-machine model for the ISO-2022-JP escape scheme.
# BUG FIX: the decompiled constants carried literal embedded quotes
# ('"ISO-2022-JP"' / '"Japanese"'); upstream chardet uses the plain
# strings restored here.
ISO2022JP_SM_MODEL = {
    'class_table': ISO2022JP_CLS,
    'class_factor': 10,
    'state_table': ISO2022JP_ST,
    'char_len_table': ISO2022JP_CHAR_LEN_TABLE,
    'name': 'ISO-2022-JP',
    'language': 'Japanese'}
# State-machine model for the ISO-2022-KR escape scheme.
# BUG FIX: the decompiled constants carried literal embedded quotes
# ('"ISO-2022-KR"' / '"Korean"'); upstream chardet uses the plain
# strings restored here.
ISO2022KR_SM_MODEL = {
    'class_table': ISO2022KR_CLS,
    'class_factor': 6,
    'state_table': ISO2022KR_ST,
    'char_len_table': ISO2022KR_CHAR_LEN_TABLE,
    'name': 'ISO-2022-KR',
    'language': 'Korean'}
2024-04-18 03:12:56
+# Size of source mod 2**32: 3749 bytes
+from .enums import ProbingState, MachineState
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import EUCJPDistributionAnalysis
+from .jpcntx import EUCJPContextAnalysis
+from .mbcssm import EUCJP_SM_MODEL
+
+class EUCJPProber(MultiByteCharSetProber):  # EUC-JP prober: coding state machine plus context/distribution analyses
+
+    def __init__(self):
+        super(EUCJPProber, self).__init__()
+        self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL)
+        self.distribution_analyzer = EUCJPDistributionAnalysis()
+        self.context_analyzer = EUCJPContextAnalysis()
+        self.reset()
+
+    def reset(self):
+        super(EUCJPProber, self).reset()
+        self.context_analyzer.reset()
+
+    @property
+    def charset_name(self):
+        return "EUC-JP"
+
+    @property
+    def language(self):
+        return "Japanese"
+
+    def feed(self, byte_str):  # consume a chunk of bytes, return the updated ProbingState
+        for i in range(len(byte_str)):
+            coding_state = self.coding_sm.next_state(byte_str[i])
+            if coding_state == MachineState.ERROR:
+                self.logger.debug("%s %s prober hit error at byte %s", self.charset_name, self.language, i)
+                self._state = ProbingState.NOT_ME
+                break
+            elif coding_state == MachineState.ITS_ME:
+                self._state = ProbingState.FOUND_IT
+                break
+            elif coding_state == MachineState.START:
+                char_len = self.coding_sm.get_current_charlen()
+                if i == 0:
+                    self._last_char[1] = byte_str[0]
+                    self.context_analyzer.feed(self._last_char, char_len)
+                    self.distribution_analyzer.feed(self._last_char, char_len)
+                else:
+                    self.context_analyzer.feed(byte_str[i - 1:i + 1], char_len)  # FIX: decompiler emitted byte_str[(i - 1)[:i + 1]], which subscripts an int and raises TypeError
+                    self.distribution_analyzer.feed(byte_str[i - 1:i + 1], char_len)  # FIX: same artifact; feed the 2-byte window, as upstream chardet does
+
+        self._last_char[0] = byte_str[-1]
+        if self.state == ProbingState.DETECTING:
+            if self.context_analyzer.got_enough_data():
+                if self.get_confidence() > self.SHORTCUT_THRESHOLD:
+                    self._state = ProbingState.FOUND_IT
+        return self.state
+
+    def get_confidence(self):  # best of the two analyses' confidences
+        context_conf = self.context_analyzer.get_confidence()
+        distrib_conf = self.distribution_analyzer.get_confidence()
+        return max(context_conf, distrib_conf)
diff --git a/APPS_UNCOMPILED/lib/chardet/euckrfreq.py b/APPS_UNCOMPILED/lib/chardet/euckrfreq.py
new file mode 100644
index 0000000..528605e
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/chardet/euckrfreq.py
@@ -0,0 +1,245 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52)
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/euckrfreq.py
+# Compiled at: 2024-04-18 03:12:56
+# Size of source mod 2**32: 13546 bytes
+EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0
+EUCKR_TABLE_SIZE = 2352
+EUCKR_CHAR_TO_FREQ_ORDER = (13, 130, 120, 1396, 481, 1719, 1720, 328, 609, 212, 1721,
+ 707, 400, 299, 1722, 87, 1397, 1723, 104, 536, 1117,
+ 1203, 1724, 1267, 685, 1268, 508, 1725, 1726, 1727, 1728,
+ 1398, 1399, 1729, 1730, 1731, 141, 621, 326, 1057, 368,
+ 1732, 267, 488, 20, 1733, 1269, 1734, 945, 1400, 1735,
+ 47, 904, 1270, 1736, 1737, 773, 248, 1738, 409, 313,
+ 786, 429, 1739, 116, 987, 813, 1401, 683, 75, 1204, 145,
+ 1740, 1741, 1742, 1743, 16, 847, 667, 622, 708, 1744,
+ 1745, 1746, 966, 787, 304, 129, 1747, 60, 820, 123, 676,
+ 1748, 1749, 1750, 1751, 617, 1752, 626, 1753, 1754, 1755,
+ 1756, 653, 1757, 1758, 1759, 1760, 1761, 1762, 856, 344,
+ 1763, 1764, 1765, 1766, 89, 401, 418, 806, 905, 848,
+ 1767, 1768, 1769, 946, 1205, 709, 1770, 1118, 1771, 241,
+ 1772, 1773, 1774, 1271, 1775, 569, 1776, 999, 1777, 1778,
+ 1779, 1780, 337, 751, 1058, 28, 628, 254, 1781, 177,
+ 906, 270, 349, 891, 1079, 1782, 19, 1783, 379, 1784,
+ 315, 1785, 629, 754, 1402, 559, 1786, 636, 203, 1206,
+ 1787, 710, 567, 1788, 935, 814, 1789, 1790, 1207, 766,
+ 528, 1791, 1792, 1208, 1793, 1794, 1795, 1796, 1797,
+ 1403, 1798, 1799, 533, 1059, 1404, 1405, 1156, 1406,
+ 936, 884, 1080, 1800, 351, 1801, 1802, 1803, 1804, 1805,
+ 801, 1806, 1807, 1808, 1119, 1809, 1157, 714, 474, 1407,
+
1810, 298, 899, 885, 1811, 1120, 802, 1158, 1812, 892, + 1813, 1814, 1408, 659, 1815, 1816, 1121, 1817, 1818, + 1819, 1820, 1821, 1822, 319, 1823, 594, 545, 1824, 815, + 937, 1209, 1825, 1826, 573, 1409, 1022, 1827, 1210, 1828, + 1829, 1830, 1831, 1832, 1833, 556, 722, 807, 1122, 1060, + 1834, 697, 1835, 900, 557, 715, 1836, 1410, 540, 1411, + 752, 1159, 294, 597, 1211, 976, 803, 770, 1412, 1837, + 1838, 39, 794, 1413, 358, 1839, 371, 925, 1840, 453, + 661, 788, 531, 723, 544, 1023, 1081, 869, 91, 1841, 392, + 430, 790, 602, 1414, 677, 1082, 457, 1415, 1416, 1842, + 1843, 475, 327, 1024, 1417, 795, 121, 1844, 733, 403, + 1418, 1845, 1846, 1847, 300, 119, 711, 1212, 627, 1848, + 1272, 207, 1849, 1850, 796, 1213, 382, 1851, 519, 1852, + 1083, 893, 1853, 1854, 1855, 367, 809, 487, 671, 1856, + 663, 1857, 1858, 956, 471, 306, 857, 1859, 1860, 1160, + 1084, 1861, 1862, 1863, 1864, 1865, 1061, 1866, 1867, + 1868, 1869, 1870, 1871, 282, 96, 574, 1872, 502, 1085, + 1873, 1214, 1874, 907, 1875, 1876, 827, 977, 1419, 1420, + 1421, 268, 1877, 1422, 1878, 1879, 1880, 308, 1881, 2, + 537, 1882, 1883, 1215, 1884, 1885, 127, 791, 1886, 1273, + 1423, 1887, 34, 336, 404, 643, 1888, 571, 654, 894, 840, + 1889, 0, 886, 1274, 122, 575, 260, 908, 938, 1890, 1275, + 410, 316, 1891, 1892, 100, 1893, 1894, 1123, 48, 1161, + 1124, 1025, 1895, 633, 901, 1276, 1896, 1897, 115, 816, + 1898, 317, 1899, 694, 1900, 909, 734, 1424, 572, 866, + 1425, 691, 85, 524, 1010, 543, 394, 841, 1901, 1902, + 1903, 1026, 1904, 1905, 1906, 1907, 1908, 1909, 30, 451, + 651, 988, 310, 1910, 1911, 1426, 810, 1216, 93, 1912, + 1913, 1277, 1217, 1914, 858, 759, 45, 58, 181, 610, 269, + 1915, 1916, 131, 1062, 551, 443, 1000, 821, 1427, 957, + 895, 1086, 1917, 1918, 375, 1919, 359, 1920, 687, 1921, + 822, 1922, 293, 1923, 1924, 40, 662, 118, 692, 29, 939, + 887, 640, 482, 174, 1925, 69, 1162, 728, 1428, 910, 1926, + 1278, 1218, 1279, 386, 870, 217, 854, 1163, 823, 1927, + 1928, 1929, 1930, 834, 1931, 78, 1932, 859, 
1933, 1063, + 1934, 1935, 1936, 1937, 438, 1164, 208, 595, 1938, 1939, + 1940, 1941, 1219, 1125, 1942, 280, 888, 1429, 1430, 1220, + 1431, 1943, 1944, 1945, 1946, 1947, 1280, 150, 510, 1432, + 1948, 1949, 1950, 1951, 1952, 1953, 1954, 1011, 1087, + 1955, 1433, 1043, 1956, 881, 1957, 614, 958, 1064, 1065, + 1221, 1958, 638, 1001, 860, 967, 896, 1434, 989, 492, + 553, 1281, 1165, 1959, 1282, 1002, 1283, 1222, 1960, + 1961, 1962, 1963, 36, 383, 228, 753, 247, 454, 1964, + 876, 678, 1965, 1966, 1284, 126, 464, 490, 835, 136, + 672, 529, 940, 1088, 1435, 473, 1967, 1968, 467, 50, + 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, + 849, 882, 1126, 1285, 639, 1044, 133, 140, 288, 360, + 811, 563, 1027, 561, 142, 523, 1969, 1970, 1971, 7, 103, + 296, 439, 407, 506, 634, 990, 1972, 1973, 1974, 1975, + 645, 1976, 1977, 1978, 1979, 1980, 1981, 236, 1982, 1436, + 1983, 1984, 1089, 192, 828, 618, 518, 1166, 333, 1127, + 1985, 818, 1223, 1986, 1987, 1988, 1989, 1990, 1991, + 1992, 1993, 342, 1128, 1286, 746, 842, 1994, 1995, 560, + 223, 1287, 98, 8, 189, 650, 978, 1288, 1996, 1437, 1997, + 17, 345, 250, 423, 277, 234, 512, 226, 97, 289, 42, 167, + 1998, 201, 1999, 2000, 843, 836, 824, 532, 338, 783, + 1090, 182, 576, 436, 1438, 1439, 527, 500, 2001, 947, + 889, 2002, 2003, 2004, 2005, 262, 600, 314, 447, 2006, + 547, 2007, 693, 738, 1129, 2008, 71, 1440, 745, 619, + 688, 2009, 829, 2010, 2011, 147, 2012, 33, 948, 2013, + 2014, 74, 224, 2015, 61, 191, 918, 399, 637, 2016, 1028, + 1130, 257, 902, 2017, 2018, 2019, 2020, 2021, 2022, 2023, + 2024, 2025, 2026, 837, 2027, 2028, 2029, 2030, 179, 874, + 591, 52, 724, 246, 2031, 2032, 2033, 2034, 1167, 969, + 2035, 1289, 630, 605, 911, 1091, 1168, 2036, 2037, 2038, + 1441, 912, 2039, 623, 2040, 2041, 253, 1169, 1290, 2042, + 1442, 146, 620, 611, 577, 433, 2043, 1224, 719, 1170, + 959, 440, 437, 534, 84, 388, 480, 1131, 159, 220, 198, + 679, 2044, 1012, 819, 1066, 1443, 113, 1225, 194, 318, + 1003, 1029, 2045, 2046, 2047, 2048, 
1067, 2049, 2050, + 2051, 2052, 2053, 59, 913, 112, 2054, 632, 2055, 455, + 144, 739, 1291, 2056, 273, 681, 499, 2057, 448, 2058, + 2059, 760, 2060, 2061, 970, 384, 169, 245, 1132, 2062, + 2063, 414, 1444, 2064, 2065, 41, 235, 2066, 157, 252, + 877, 568, 919, 789, 580, 2067, 725, 2068, 2069, 1292, + 2070, 2071, 1445, 2072, 1446, 2073, 2074, 55, 588, 66, + 1447, 271, 1092, 2075, 1226, 2076, 960, 1013, 372, 2077, + 2078, 2079, 2080, 2081, 1293, 2082, 2083, 2084, 2085, + 850, 2086, 2087, 2088, 2089, 2090, 186, 2091, 1068, 180, + 2092, 2093, 2094, 109, 1227, 522, 606, 2095, 867, 1448, + 1093, 991, 1171, 926, 353, 1133, 2096, 581, 2097, 2098, + 2099, 1294, 1449, 1450, 2100, 596, 1172, 1014, 1228, + 2101, 1451, 1295, 1173, 1229, 2102, 2103, 1296, 1134, + 1452, 949, 1135, 2104, 2105, 1094, 1453, 1454, 1455, + 2106, 1095, 2107, 2108, 2109, 2110, 2111, 2112, 2113, + 2114, 2115, 2116, 2117, 804, 2118, 2119, 1230, 1231, + 805, 1456, 405, 1136, 2120, 2121, 2122, 2123, 2124, 720, + 701, 1297, 992, 1457, 927, 1004, 2125, 2126, 2127, 2128, + 2129, 2130, 22, 417, 2131, 303, 2132, 385, 2133, 971, + 520, 513, 2134, 1174, 73, 1096, 231, 274, 962, 1458, + 673, 2135, 1459, 2136, 152, 1137, 2137, 2138, 2139, 2140, + 1005, 1138, 1460, 1139, 2141, 2142, 2143, 2144, 11, 374, + 844, 2145, 154, 1232, 46, 1461, 2146, 838, 830, 721, + 1233, 106, 2147, 90, 428, 462, 578, 566, 1175, 352, 2148, + 2149, 538, 1234, 124, 1298, 2150, 1462, 761, 565, 2151, + 686, 2152, 649, 2153, 72, 173, 2154, 460, 415, 2155, + 1463, 2156, 1235, 305, 2157, 2158, 2159, 2160, 2161, + 2162, 579, 2163, 2164, 2165, 2166, 2167, 747, 2168, 2169, + 2170, 2171, 1464, 669, 2172, 2173, 2174, 2175, 2176, + 1465, 2177, 23, 530, 285, 2178, 335, 729, 2179, 397, + 2180, 2181, 2182, 1030, 2183, 2184, 698, 2185, 2186, + 325, 2187, 2188, 369, 2189, 799, 1097, 1015, 348, 2190, + 1069, 680, 2191, 851, 1466, 2192, 2193, 10, 2194, 613, + 424, 2195, 979, 108, 449, 589, 27, 172, 81, 1031, 80, + 774, 281, 350, 1032, 525, 301, 582, 1176, 2196, 
674, + 1045, 2197, 2198, 1467, 730, 762, 2199, 2200, 2201, 2202, + 1468, 2203, 993, 2204, 2205, 266, 1070, 963, 1140, 2206, + 2207, 2208, 664, 1098, 972, 2209, 2210, 2211, 1177, 1469, + 1470, 871, 2212, 2213, 2214, 2215, 2216, 1471, 2217, + 2218, 2219, 2220, 2221, 2222, 2223, 2224, 2225, 2226, + 2227, 1472, 1236, 2228, 2229, 2230, 2231, 2232, 2233, + 2234, 2235, 1299, 2236, 2237, 200, 2238, 477, 373, 2239, + 2240, 731, 825, 777, 2241, 2242, 2243, 521, 486, 548, + 2244, 2245, 2246, 1473, 1300, 53, 549, 137, 875, 76, + 158, 2247, 1301, 1474, 469, 396, 1016, 278, 712, 2248, + 321, 442, 503, 767, 744, 941, 1237, 1178, 1475, 2249, + 82, 178, 1141, 1179, 973, 2250, 1302, 2251, 297, 2252, + 2253, 570, 2254, 2255, 2256, 18, 450, 206, 2257, 290, + 292, 1142, 2258, 511, 162, 99, 346, 164, 735, 2259, 1476, + 1477, 4, 554, 343, 798, 1099, 2260, 1100, 2261, 43, 171, + 1303, 139, 215, 2262, 2263, 717, 775, 2264, 1033, 322, + 216, 2265, 831, 2266, 149, 2267, 1304, 2268, 2269, 702, + 1238, 135, 845, 347, 309, 2270, 484, 2271, 878, 655, + 238, 1006, 1478, 2272, 67, 2273, 295, 2274, 2275, 461, + 2276, 478, 942, 412, 2277, 1034, 2278, 2279, 2280, 265, + 2281, 541, 2282, 2283, 2284, 2285, 2286, 70, 852, 1071, + 2287, 2288, 2289, 2290, 21, 56, 509, 117, 432, 2291, + 2292, 331, 980, 552, 1101, 148, 284, 105, 393, 1180, + 1239, 755, 2293, 187, 2294, 1046, 1479, 2295, 340, 2296, + 63, 1047, 230, 2297, 2298, 1305, 763, 1306, 101, 800, + 808, 494, 2299, 2300, 2301, 903, 2302, 37, 1072, 14, + 5, 2303, 79, 675, 2304, 312, 2305, 2306, 2307, 2308, + 2309, 1480, 6, 1307, 2310, 2311, 2312, 1, 470, 35, 24, + 229, 2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, + 77, 855, 964, 2314, 259, 2315, 501, 380, 2316, 2317, + 83, 981, 153, 689, 1308, 1481, 1482, 1483, 2318, 2319, + 716, 1484, 2320, 2321, 2322, 2323, 2324, 2325, 1485, + 2326, 2327, 128, 57, 68, 261, 1048, 211, 170, 1240, 31, + 2328, 51, 435, 742, 2329, 2330, 2331, 635, 2332, 264, + 456, 2333, 2334, 2335, 425, 2336, 1486, 143, 507, 263, + 
943, 2337, 363, 920, 1487, 256, 1488, 1102, 243, 601, + 1489, 2338, 2339, 2340, 2341, 2342, 2343, 2344, 861, + 2345, 2346, 2347, 2348, 2349, 2350, 395, 2351, 1490, + 1491, 62, 535, 166, 225, 2352, 2353, 668, 419, 1241, + 138, 604, 928, 2354, 1181, 2355, 1492, 1493, 2356, 2357, + 2358, 1143, 2359, 696, 2360, 387, 307, 1309, 682, 476, + 2361, 2362, 332, 12, 222, 156, 2363, 232, 2364, 641, + 276, 656, 517, 1494, 1495, 1035, 416, 736, 1496, 2365, + 1017, 586, 2366, 2367, 2368, 1497, 2369, 242, 2370, 2371, + 2372, 1498, 2373, 965, 713, 2374, 2375, 2376, 2377, 740, + 982, 1499, 944, 1500, 1007, 2378, 2379, 1310, 1501, 2380, + 2381, 2382, 785, 329, 2383, 2384, 1502, 2385, 2386, 2387, + 932, 2388, 1503, 2389, 2390, 2391, 2392, 1242, 2393, + 2394, 2395, 2396, 2397, 994, 950, 2398, 2399, 2400, 2401, + 1504, 1311, 2402, 2403, 2404, 2405, 1049, 749, 2406, + 2407, 853, 718, 1144, 1312, 2408, 1182, 1505, 2409, 2410, + 255, 516, 479, 564, 550, 214, 1506, 1507, 1313, 413, + 239, 444, 339, 1145, 1036, 1508, 1509, 1314, 1037, 1510, + 1315, 2411, 1511, 2412, 2413, 2414, 176, 703, 497, 624, + 593, 921, 302, 2415, 341, 165, 1103, 1512, 2416, 1513, + 2417, 2418, 2419, 376, 2420, 700, 2421, 2422, 2423, 258, + 768, 1316, 2424, 1183, 2425, 995, 608, 2426, 2427, 2428, + 2429, 221, 2430, 2431, 2432, 2433, 2434, 2435, 2436, + 2437, 195, 323, 726, 188, 897, 983, 1317, 377, 644, 1050, + 879, 2438, 452, 2439, 2440, 2441, 2442, 2443, 2444, 914, + 2445, 2446, 2447, 2448, 915, 489, 2449, 1514, 1184, 2450, + 2451, 515, 64, 427, 495, 2452, 583, 2453, 483, 485, 1038, + 562, 213, 1515, 748, 666, 2454, 2455, 2456, 2457, 334, + 2458, 780, 996, 1008, 705, 1243, 2459, 2460, 2461, 2462, + 2463, 114, 2464, 493, 1146, 366, 163, 1516, 961, 1104, + 2465, 291, 2466, 1318, 1105, 2467, 1517, 365, 2468, 355, + 951, 1244, 2469, 1319, 2470, 631, 2471, 2472, 218, 1320, + 364, 320, 756, 1518, 1519, 1321, 1520, 1322, 2473, 2474, + 2475, 2476, 997, 2477, 2478, 2479, 2480, 665, 1185, 2481, + 916, 1521, 2482, 2483, 2484, 
584, 684, 2485, 2486, 797, + 2487, 1051, 1186, 2488, 2489, 2490, 1522, 2491, 2492, + 370, 2493, 1039, 1187, 65, 2494, 434, 205, 463, 1188, + 2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, + 771, 585, 2496, 590, 505, 1073, 2497, 599, 244, 219, + 917, 1018, 952, 646, 1523, 2498, 1323, 2499, 2500, 49, + 984, 354, 741, 2501, 625, 2502, 1324, 2503, 1019, 190, + 357, 757, 491, 95, 782, 868, 2504, 2505, 2506, 2507, + 2508, 2509, 134, 1524, 1074, 422, 1525, 898, 2510, 161, + 2511, 2512, 2513, 2514, 769, 2515, 1526, 2516, 2517, + 411, 1325, 2518, 472, 1527, 2519, 2520, 2521, 2522, 2523, + 2524, 985, 2525, 2526, 2527, 2528, 2529, 2530, 764, 2531, + 1245, 2532, 2533, 25, 204, 311, 2534, 496, 2535, 1052, + 2536, 2537, 2538, 2539, 2540, 2541, 2542, 199, 704, 504, + 468, 758, 657, 1528, 196, 44, 839, 1246, 272, 750, 2543, + 765, 862, 2544, 2545, 1326, 2546, 132, 615, 933, 2547, + 732, 2548, 2549, 2550, 1189, 1529, 2551, 283, 1247, 1053, + 607, 929, 2552, 2553, 2554, 930, 183, 872, 616, 1040, + 1147, 2555, 1148, 1020, 441, 249, 1075, 2556, 2557, 2558, + 466, 743, 2559, 2560, 2561, 92, 514, 426, 420, 526, 2562, + 2563, 2564, 2565, 2566, 2567, 2568, 185, 2569, 2570, + 2571, 2572, 776, 1530, 658, 2573, 362, 2574, 361, 922, + 1076, 793, 2575, 2576, 2577, 2578, 2579, 2580, 1531, + 251, 2581, 2582, 2583, 2584, 1532, 54, 612, 237, 1327, + 2585, 2586, 275, 408, 647, 111, 2587, 1533, 1106, 465, + 3, 458, 9, 38, 2588, 107, 110, 890, 209, 26, 737, 498, + 2589, 1534, 2590, 431, 202, 88, 1535, 356, 287, 1107, + 660, 1149, 2591, 381, 1536, 986, 1150, 445, 1248, 1151, + 974, 2592, 2593, 846, 2594, 446, 953, 184, 1249, 1250, + 727, 2595, 923, 193, 883, 2596, 2597, 2598, 102, 324, + 539, 817, 2599, 421, 1041, 2600, 832, 2601, 94, 175, + 197, 406, 2602, 459, 2603, 2604, 2605, 2606, 2607, 330, + 555, 2608, 2609, 2610, 706, 1108, 389, 2611, 2612, 2613, + 2614, 233, 2615, 833, 558, 931, 954, 1251, 2616, 2617, + 1537, 546, 2618, 2619, 1009, 2620, 2621, 2622, 1538, + 690, 1328, 2623, 955, 
2624, 1539, 2625, 2626, 772, 2627,
+ 2628, 2629, 2630, 2631, 924, 648, 863, 603, 2632, 2633,
+ 934, 1540, 864, 865, 2634, 642, 1042, 670, 1190, 2635,
+ 2636, 2637, 2638, 168, 2639, 652, 873, 542, 1054, 1541,
+ 2640, 2641, 2642)
diff --git a/APPS_UNCOMPILED/lib/chardet/euckrprober.py b/APPS_UNCOMPILED/lib/chardet/euckrprober.py
new file mode 100644
index 0000000..4609a79
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/chardet/euckrprober.py
@@ -0,0 +1,27 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52)
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/euckrprober.py
+# Compiled at: 2024-04-18 03:12:56
+# Size of source mod 2**32: 1748 bytes
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import EUCKRDistributionAnalysis
+from .mbcssm import EUCKR_SM_MODEL
+
+class EUCKRProber(MultiByteCharSetProber):  # EUC-KR prober; defines no feed() of its own, so detection runs through the MultiByteCharSetProber base
+
+    def __init__(self):
+        super(EUCKRProber, self).__init__()
+        self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL)  # byte-sequence validity machine built from the EUC-KR model
+        self.distribution_analyzer = EUCKRDistributionAnalysis()  # presumably backed by EUCKR_CHAR_TO_FREQ_ORDER above — confirm in chardistribution.py
+        self.reset()
+
+    @property
+    def charset_name(self):  # charset label this prober reports
+        return "EUC-KR"
+
+    @property
+    def language(self):  # language label this prober reports
+        return "Korean"
diff --git a/APPS_UNCOMPILED/lib/chardet/euctwfreq.py b/APPS_UNCOMPILED/lib/chardet/euctwfreq.py
new file mode 100644
index 0000000..886383e
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/chardet/euctwfreq.py
@@ -0,0 +1,575 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52)
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/euctwfreq.py
+# Compiled at: 2024-04-18 03:12:56
+# Size of source mod 2**32: 31621 bytes
+EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75
+EUCTW_TABLE_SIZE = 5376
+EUCTW_CHAR_TO_FREQ_ORDER = (1, 1800,
1506, 255, 1431, 198, 9, 82, 6, 7310, 177, 202, + 3615, 1256, 2808, 110, 3735, 33, 3241, 261, 76, 44, 2113, + 16, 2931, 2184, 1176, 659, 3868, 26, 3404, 2643, 1198, + 3869, 3313, 4060, 410, 2211, 302, 590, 361, 1963, 8, + 204, 58, 4296, 7311, 1931, 63, 7312, 7313, 317, 1614, + 75, 222, 159, 4061, 2412, 1480, 7314, 3500, 3068, 224, + 2809, 3616, 3, 10, 3870, 1471, 29, 2774, 1135, 2852, + 1939, 873, 130, 3242, 1123, 312, 7315, 4297, 2051, 507, + 252, 682, 7316, 142, 1914, 124, 206, 2932, 34, 3501, + 3173, 64, 604, 7317, 2494, 1976, 1977, 155, 1990, 645, + 641, 1606, 7318, 3405, 337, 72, 406, 7319, 80, 630, 238, + 3174, 1509, 263, 939, 1092, 2644, 756, 1440, 1094, 3406, + 449, 69, 2969, 591, 179, 2095, 471, 115, 2034, 1843, + 60, 50, 2970, 134, 806, 1868, 734, 2035, 3407, 180, 995, + 1607, 156, 537, 2893, 688, 7320, 319, 1305, 779, 2144, + 514, 2374, 298, 4298, 359, 2495, 90, 2707, 1338, 663, + 11, 906, 1099, 2545, 20, 2436, 182, 532, 1716, 7321, + 732, 1376, 4062, 1311, 1420, 3175, 25, 2312, 1056, 113, + 399, 382, 1949, 242, 3408, 2467, 529, 3243, 475, 1447, + 3617, 7322, 117, 21, 656, 810, 1297, 2295, 2329, 3502, + 7323, 126, 4063, 706, 456, 150, 613, 4299, 71, 1118, + 2036, 4064, 145, 3069, 85, 835, 486, 2114, 1246, 1426, + 428, 727, 1285, 1015, 800, 106, 623, 303, 1281, 7324, + 2127, 2354, 347, 3736, 221, 3503, 3110, 7325, 1955, 1153, + 4065, 83, 296, 1199, 3070, 192, 624, 93, 7326, 822, 1897, + 2810, 3111, 795, 2064, 991, 1554, 1542, 1592, 27, 43, + 2853, 859, 139, 1456, 860, 4300, 437, 712, 3871, 164, + 2392, 3112, 695, 211, 3017, 2096, 195, 3872, 1608, 3504, + 3505, 3618, 3873, 234, 811, 2971, 2097, 3874, 2229, 1441, + 3506, 1615, 2375, 668, 2076, 1638, 305, 228, 1664, 4301, + 467, 415, 7327, 262, 2098, 1593, 239, 108, 300, 200, + 1033, 512, 1247, 2077, 7328, 7329, 2173, 3176, 3619, + 2673, 593, 845, 1062, 3244, 88, 1723, 2037, 3875, 1950, + 212, 266, 152, 149, 468, 1898, 4066, 4302, 77, 187, 7330, + 3018, 37, 5, 2972, 7331, 3876, 7332, 7333, 39, 2517, + 4303, 
2894, 3177, 2078, 55, 148, 74, 4304, 545, 483, + 1474, 1029, 1665, 217, 1869, 1531, 3113, 1104, 2645, + 4067, 24, 172, 3507, 900, 3877, 3508, 3509, 4305, 32, + 1408, 2811, 1312, 329, 487, 2355, 2247, 2708, 784, 2674, + 4, 3019, 3314, 1427, 1788, 188, 109, 499, 7334, 3620, + 1717, 1789, 888, 1217, 3020, 4306, 7335, 3510, 7336, + 3315, 1520, 3621, 3878, 196, 1034, 775, 7337, 7338, 929, + 1815, 249, 439, 38, 7339, 1063, 7340, 794, 3879, 1435, + 2296, 46, 178, 3245, 2065, 7341, 2376, 7342, 214, 1709, + 4307, 804, 35, 707, 324, 3622, 1601, 2546, 140, 459, + 4068, 7343, 7344, 1365, 839, 272, 978, 2257, 2572, 3409, + 2128, 1363, 3623, 1423, 697, 100, 3071, 48, 70, 1231, + 495, 3114, 2193, 7345, 1294, 7346, 2079, 462, 586, 1042, + 3246, 853, 256, 988, 185, 2377, 3410, 1698, 434, 1084, + 7347, 3411, 314, 2615, 2775, 4308, 2330, 2331, 569, 2280, + 637, 1816, 2518, 757, 1162, 1878, 1616, 3412, 287, 1577, + 2115, 768, 4309, 1671, 2854, 3511, 2519, 1321, 3737, + 909, 2413, 7348, 4069, 933, 3738, 7349, 2052, 2356, 1222, + 4310, 765, 2414, 1322, 786, 4311, 7350, 1919, 1462, 1677, + 2895, 1699, 7351, 4312, 1424, 2437, 3115, 3624, 2590, + 3316, 1774, 1940, 3413, 3880, 4070, 309, 1369, 1130, + 2812, 364, 2230, 1653, 1299, 3881, 3512, 3882, 3883, + 2646, 525, 1085, 3021, 902, 2000, 1475, 964, 4313, 421, + 1844, 1415, 1057, 2281, 940, 1364, 3116, 376, 4314, 4315, + 1381, 7, 2520, 983, 2378, 336, 1710, 2675, 1845, 321, + 3414, 559, 1131, 3022, 2742, 1808, 1132, 1313, 265, 1481, + 1857, 7352, 352, 1203, 2813, 3247, 167, 1089, 420, 2814, + 776, 792, 1724, 3513, 4071, 2438, 3248, 7353, 4072, 7354, + 446, 229, 333, 2743, 901, 3739, 1200, 1557, 4316, 2647, + 1920, 395, 2744, 2676, 3740, 4073, 1835, 125, 916, 3178, + 2616, 4317, 7355, 7356, 3741, 7357, 7358, 7359, 4318, + 3117, 3625, 1133, 2547, 1757, 3415, 1510, 2313, 1409, + 3514, 7360, 2145, 438, 2591, 2896, 2379, 3317, 1068, + 958, 3023, 461, 311, 2855, 2677, 4074, 1915, 3179, 4075, + 1978, 383, 750, 2745, 2617, 4076, 274, 539, 385, 
1278, + 1442, 7361, 1154, 1964, 384, 561, 210, 98, 1295, 2548, + 3515, 7362, 1711, 2415, 1482, 3416, 3884, 2897, 1257, + 129, 7363, 3742, 642, 523, 2776, 2777, 2648, 7364, 141, + 2231, 1333, 68, 176, 441, 876, 907, 4077, 603, 2592, + 710, 171, 3417, 404, 549, 18, 3118, 2393, 1410, 3626, + 1666, 7365, 3516, 4319, 2898, 4320, 7366, 2973, 368, + 7367, 146, 366, 99, 871, 3627, 1543, 748, 807, 1586, + 1185, 22, 2258, 379, 3743, 3180, 7368, 3181, 505, 1941, + 2618, 1991, 1382, 2314, 7369, 380, 2357, 218, 702, 1817, + 1248, 3418, 3024, 3517, 3318, 3249, 7370, 2974, 3628, + 930, 3250, 3744, 7371, 59, 7372, 585, 601, 4078, 497, + 3419, 1112, 1314, 4321, 1801, 7373, 1223, 1472, 2174, + 7374, 749, 1836, 690, 1899, 3745, 1772, 3885, 1476, 429, + 1043, 1790, 2232, 2116, 917, 4079, 447, 1086, 1629, 7375, + 556, 7376, 7377, 2020, 1654, 844, 1090, 105, 550, 966, + 1758, 2815, 1008, 1782, 686, 1095, 7378, 2282, 793, 1602, + 7379, 3518, 2593, 4322, 4080, 2933, 2297, 4323, 3746, + 980, 2496, 544, 353, 527, 4324, 908, 2678, 2899, 7380, + 381, 2619, 1942, 1348, 7381, 1341, 1252, 560, 3072, 7382, + 3420, 2856, 7383, 2053, 973, 886, 2080, 143, 4325, 7384, + 7385, 157, 3886, 496, 4081, 57, 840, 540, 2038, 4326, + 4327, 3421, 2117, 1445, 970, 2259, 1748, 1965, 2081, + 4082, 3119, 1234, 1775, 3251, 2816, 3629, 773, 1206, + 2129, 1066, 2039, 1326, 3887, 1738, 1725, 4083, 279, + 3120, 51, 1544, 2594, 423, 1578, 2130, 2066, 173, 4328, + 1879, 7386, 7387, 1583, 264, 610, 3630, 4329, 2439, 280, + 154, 7388, 7389, 7390, 1739, 338, 1282, 3073, 693, 2857, + 1411, 1074, 3747, 2440, 7391, 4330, 7392, 7393, 1240, + 952, 2394, 7394, 2900, 1538, 2679, 685, 1483, 4084, 2468, + 1436, 953, 4085, 2054, 4331, 671, 2395, 79, 4086, 2441, + 3252, 608, 567, 2680, 3422, 4087, 4088, 1691, 393, 1261, + 1791, 2396, 7395, 4332, 7396, 7397, 7398, 7399, 1383, + 1672, 3748, 3182, 1464, 522, 1119, 661, 1150, 216, 675, + 4333, 3888, 1432, 3519, 609, 4334, 2681, 2397, 7400, + 7401, 7402, 4089, 3025, 0, 7403, 2469, 315, 
231, 2442, + 301, 3319, 4335, 2380, 7404, 233, 4090, 3631, 1818, 4336, + 4337, 7405, 96, 1776, 1315, 2082, 7406, 257, 7407, 1809, + 3632, 2709, 1139, 1819, 4091, 2021, 1124, 2163, 2778, + 1777, 2649, 7408, 3074, 363, 1655, 3183, 7409, 2975, + 7410, 7411, 7412, 3889, 1567, 3890, 718, 103, 3184, 849, + 1443, 341, 3320, 2934, 1484, 7413, 1712, 127, 67, 339, + 4092, 2398, 679, 1412, 821, 7414, 7415, 834, 738, 351, + 2976, 2146, 846, 235, 1497, 1880, 418, 1992, 3749, 2710, + 186, 1100, 2147, 2746, 3520, 1545, 1355, 2935, 2858, + 1377, 583, 3891, 4093, 2573, 2977, 7416, 1298, 3633, + 1078, 2549, 3634, 2358, 78, 3750, 3751, 267, 1289, 2099, + 2001, 1594, 4094, 348, 369, 1274, 2194, 2175, 1837, 4338, + 1820, 2817, 3635, 2747, 2283, 2002, 4339, 2936, 2748, + 144, 3321, 882, 4340, 3892, 2749, 3423, 4341, 2901, 7417, + 4095, 1726, 320, 7418, 3893, 3026, 788, 2978, 7419, 2818, + 1773, 1327, 2859, 3894, 2819, 7420, 1306, 4342, 2003, + 1700, 3752, 3521, 2359, 2650, 787, 2022, 506, 824, 3636, + 534, 323, 4343, 1044, 3322, 2023, 1900, 946, 3424, 7421, + 1778, 1500, 1678, 7422, 1881, 4344, 165, 243, 4345, 3637, + 2521, 123, 683, 4096, 764, 4346, 36, 3895, 1792, 589, + 2902, 816, 626, 1667, 3027, 2233, 1639, 1555, 1622, 3753, + 3896, 7423, 3897, 2860, 1370, 1228, 1932, 891, 2083, + 2903, 304, 4097, 7424, 292, 2979, 2711, 3522, 691, 2100, + 4098, 1115, 4347, 118, 662, 7425, 611, 1156, 854, 2381, + 1316, 2861, 2, 386, 515, 2904, 7426, 7427, 3253, 868, + 2234, 1486, 855, 2651, 785, 2212, 3028, 7428, 1040, 3185, + 3523, 7429, 3121, 448, 7430, 1525, 7431, 2164, 4348, + 7432, 3754, 7433, 4099, 2820, 3524, 3122, 503, 818, 3898, + 3123, 1568, 814, 676, 1444, 306, 1749, 7434, 3755, 1416, + 1030, 197, 1428, 805, 2821, 1501, 4349, 7435, 7436, 7437, + 1993, 7438, 4350, 7439, 7440, 2195, 13, 2779, 3638, 2980, + 3124, 1229, 1916, 7441, 3756, 2131, 7442, 4100, 4351, + 2399, 3525, 7443, 2213, 1511, 1727, 1120, 7444, 7445, + 646, 3757, 2443, 307, 7446, 7447, 1595, 3186, 7448, 7449, + 7450, 3639, 
1113, 1356, 3899, 1465, 2522, 2523, 7451, + 519, 7452, 128, 2132, 92, 2284, 1979, 7453, 3900, 1512, + 342, 3125, 2196, 7454, 2780, 2214, 1980, 3323, 7455, + 290, 1656, 1317, 789, 827, 2360, 7456, 3758, 4352, 562, + 581, 3901, 7457, 401, 4353, 2248, 94, 4354, 1399, 2781, + 7458, 1463, 2024, 4355, 3187, 1943, 7459, 828, 1105, + 4101, 1262, 1394, 7460, 4102, 605, 4356, 7461, 1783, + 2862, 7462, 2822, 819, 2101, 578, 2197, 2937, 7463, 1502, + 436, 3254, 4103, 3255, 2823, 3902, 2905, 3425, 3426, + 7464, 2712, 2315, 7465, 7466, 2332, 2067, 23, 4357, 193, + 826, 3759, 2102, 699, 1630, 4104, 3075, 390, 1793, 1064, + 3526, 7467, 1579, 3076, 3077, 1400, 7468, 4105, 1838, + 1640, 2863, 7469, 4358, 4359, 137, 4106, 598, 3078, 1966, + 780, 104, 974, 2938, 7470, 278, 899, 253, 402, 572, 504, + 493, 1339, 7471, 3903, 1275, 4360, 2574, 2550, 7472, + 3640, 3029, 3079, 2249, 565, 1334, 2713, 863, 41, 7473, + 7474, 4361, 7475, 1657, 2333, 19, 463, 2750, 4107, 606, + 7476, 2981, 3256, 1087, 2084, 1323, 2652, 2982, 7477, + 1631, 1623, 1750, 4108, 2682, 7478, 2864, 791, 2714, + 2653, 2334, 232, 2416, 7479, 2983, 1498, 7480, 2654, + 2620, 755, 1366, 3641, 3257, 3126, 2025, 1609, 119, 1917, + 3427, 862, 1026, 4109, 7481, 3904, 3760, 4362, 3905, + 4363, 2260, 1951, 2470, 7482, 1125, 817, 4110, 4111, + 3906, 1513, 1766, 2040, 1487, 4112, 3030, 3258, 2824, + 3761, 3127, 7483, 7484, 1507, 7485, 2683, 733, 40, 1632, + 1106, 2865, 345, 4113, 841, 2524, 230, 4364, 2984, 1846, + 3259, 3428, 7486, 1263, 986, 3429, 7487, 735, 879, 254, + 1137, 857, 622, 1300, 1180, 1388, 1562, 3907, 3908, 2939, + 967, 2751, 2655, 1349, 592, 2133, 1692, 3324, 2985, 1994, + 4114, 1679, 3909, 1901, 2185, 7488, 739, 3642, 2715, + 1296, 1290, 7489, 4115, 2198, 2199, 1921, 1563, 2595, + 2551, 1870, 2752, 2986, 7490, 435, 7491, 343, 1108, 596, + 17, 1751, 4365, 2235, 3430, 3643, 7492, 4366, 294, 3527, + 2940, 1693, 477, 979, 281, 2041, 3528, 643, 2042, 3644, + 2621, 2782, 2261, 1031, 2335, 2134, 2298, 3529, 4367, + 367, 
1249, 2552, 7493, 3530, 7494, 4368, 1283, 3325, + 2004, 240, 1762, 3326, 4369, 4370, 836, 1069, 3128, 474, + 7495, 2148, 2525, 268, 3531, 7496, 3188, 1521, 1284, + 7497, 1658, 1546, 4116, 7498, 3532, 3533, 7499, 4117, + 3327, 2684, 1685, 4118, 961, 1673, 2622, 190, 2005, 2200, + 3762, 4371, 4372, 7500, 570, 2497, 3645, 1490, 7501, + 4373, 2623, 3260, 1956, 4374, 584, 1514, 396, 1045, 1944, + 7502, 4375, 1967, 2444, 7503, 7504, 4376, 3910, 619, + 7505, 3129, 3261, 215, 2006, 2783, 2553, 3189, 4377, + 3190, 4378, 763, 4119, 3763, 4379, 7506, 7507, 1957, + 1767, 2941, 3328, 3646, 1174, 452, 1477, 4380, 3329, + 3130, 7508, 2825, 1253, 2382, 2186, 1091, 2285, 4120, + 492, 7509, 638, 1169, 1824, 2135, 1752, 3911, 648, 926, + 1021, 1324, 4381, 520, 4382, 997, 847, 1007, 892, 4383, + 3764, 2262, 1871, 3647, 7510, 2400, 1784, 4384, 1952, + 2942, 3080, 3191, 1728, 4121, 2043, 3648, 4385, 2007, + 1701, 3131, 1551, 30, 2263, 4122, 7511, 2026, 4386, 3534, + 7512, 501, 7513, 4123, 594, 3431, 2165, 1821, 3535, 3432, + 3536, 3192, 829, 2826, 4124, 7514, 1680, 3132, 1225, + 4125, 7515, 3262, 4387, 4126, 3133, 2336, 7516, 4388, + 4127, 7517, 3912, 3913, 7518, 1847, 2383, 2596, 3330, + 7519, 4389, 374, 3914, 652, 4128, 4129, 375, 1140, 798, + 7520, 7521, 7522, 2361, 4390, 2264, 546, 1659, 138, 3031, + 2445, 4391, 7523, 2250, 612, 1848, 910, 796, 3765, 1740, + 1371, 825, 3766, 3767, 7524, 2906, 2554, 7525, 692, 444, + 3032, 2624, 801, 4392, 4130, 7526, 1491, 244, 1053, 3033, + 4131, 4132, 340, 7527, 3915, 1041, 2987, 293, 1168, 87, + 1357, 7528, 1539, 959, 7529, 2236, 721, 694, 4133, 3768, + 219, 1478, 644, 1417, 3331, 2656, 1413, 1401, 1335, 1389, + 3916, 7530, 7531, 2988, 2362, 3134, 1825, 730, 1515, + 184, 2827, 66, 4393, 7532, 1660, 2943, 246, 3332, 378, + 1457, 226, 3433, 975, 3917, 2944, 1264, 3537, 674, 696, + 7533, 163, 7534, 1141, 2417, 2166, 713, 3538, 3333, 4394, + 3918, 7535, 7536, 1186, 15, 7537, 1079, 1070, 7538, 1522, + 3193, 3539, 276, 1050, 2716, 758, 1126, 653, 2945, 
3263, + 7539, 2337, 889, 3540, 3919, 3081, 2989, 903, 1250, 4395, + 3920, 3434, 3541, 1342, 1681, 1718, 766, 3264, 286, 89, + 2946, 3649, 7540, 1713, 7541, 2597, 3334, 2990, 7542, + 2947, 2215, 3194, 2866, 7543, 4396, 2498, 2526, 181, + 387, 1075, 3921, 731, 2187, 3335, 7544, 3265, 310, 313, + 3435, 2299, 770, 4134, 54, 3034, 189, 4397, 3082, 3769, + 3922, 7545, 1230, 1617, 1849, 355, 3542, 4135, 4398, + 3336, 111, 4136, 3650, 1350, 3135, 3436, 3035, 4137, + 2149, 3266, 3543, 7546, 2784, 3923, 3924, 2991, 722, + 2008, 7547, 1071, 247, 1207, 2338, 2471, 1378, 4399, + 2009, 864, 1437, 1214, 4400, 373, 3770, 1142, 2216, 667, + 4401, 442, 2753, 2555, 3771, 3925, 1968, 4138, 3267, + 1839, 837, 170, 1107, 934, 1336, 1882, 7548, 7549, 2118, + 4139, 2828, 743, 1569, 7550, 4402, 4140, 582, 2384, 1418, + 3437, 7551, 1802, 7552, 357, 1395, 1729, 3651, 3268, + 2418, 1564, 2237, 7553, 3083, 3772, 1633, 4403, 1114, + 2085, 4141, 1532, 7554, 482, 2446, 4404, 7555, 7556, + 1492, 833, 1466, 7557, 2717, 3544, 1641, 2829, 7558, + 1526, 1272, 3652, 4142, 1686, 1794, 416, 2556, 1902, + 1953, 1803, 7559, 3773, 2785, 3774, 1159, 2316, 7560, + 2867, 4405, 1610, 1584, 3036, 2419, 2754, 443, 3269, + 1163, 3136, 7561, 7562, 3926, 7563, 4143, 2499, 3037, + 4406, 3927, 3137, 2103, 1647, 3545, 2010, 1872, 4144, + 7564, 4145, 431, 3438, 7565, 250, 97, 81, 4146, 7566, + 1648, 1850, 1558, 160, 848, 7567, 866, 740, 1694, 7568, + 2201, 2830, 3195, 4147, 4407, 3653, 1687, 950, 2472, + 426, 469, 3196, 3654, 3655, 3928, 7569, 7570, 1188, 424, + 1995, 861, 3546, 4148, 3775, 2202, 2685, 168, 1235, 3547, + 4149, 7571, 2086, 1674, 4408, 3337, 3270, 220, 2557, + 1009, 7572, 3776, 670, 2992, 332, 1208, 717, 7573, 7574, + 3548, 2447, 3929, 3338, 7575, 513, 7576, 1209, 2868, + 3339, 3138, 4409, 1080, 7577, 7578, 7579, 7580, 2527, + 3656, 3549, 815, 1587, 3930, 3931, 7581, 3550, 3439, + 3777, 1254, 4410, 1328, 3038, 1390, 3932, 1741, 3933, + 3778, 3934, 7582, 236, 3779, 2448, 3271, 7583, 7584, + 3657, 3780, 
1273, 3781, 4411, 7585, 308, 7586, 4412, + 245, 4413, 1851, 2473, 1307, 2575, 430, 715, 2136, 2449, + 7587, 270, 199, 2869, 3935, 7588, 3551, 2718, 1753, 761, + 1754, 725, 1661, 1840, 4414, 3440, 3658, 7589, 7590, + 587, 14, 3272, 227, 2598, 326, 480, 2265, 943, 2755, + 3552, 291, 650, 1883, 7591, 1702, 1226, 102, 1547, 62, + 3441, 904, 4415, 3442, 1164, 4150, 7592, 7593, 1224, + 1548, 2756, 391, 498, 1493, 7594, 1386, 1419, 7595, 2055, + 1177, 4416, 813, 880, 1081, 2363, 566, 1145, 4417, 2286, + 1001, 1035, 2558, 2599, 2238, 394, 1286, 7596, 7597, + 2068, 7598, 86, 1494, 1730, 3936, 491, 1588, 745, 897, + 2948, 843, 3340, 3937, 2757, 2870, 3273, 1768, 998, 2217, + 2069, 397, 1826, 1195, 1969, 3659, 2993, 3341, 284, 7599, + 3782, 2500, 2137, 2119, 1903, 7600, 3938, 2150, 3939, + 4151, 1036, 3443, 1904, 114, 2559, 4152, 209, 1527, 7601, + 7602, 2949, 2831, 2625, 2385, 2719, 3139, 812, 2560, + 7603, 3274, 7604, 1559, 737, 1884, 3660, 1210, 885, 28, + 2686, 3553, 3783, 7605, 4153, 1004, 1779, 4418, 7606, + 346, 1981, 2218, 2687, 4419, 3784, 1742, 797, 1642, 3940, + 1933, 1072, 1384, 2151, 896, 3941, 3275, 3661, 3197, + 2871, 3554, 7607, 2561, 1958, 4420, 2450, 1785, 7608, + 7609, 7610, 3942, 4154, 1005, 1308, 3662, 4155, 2720, + 4421, 4422, 1528, 2600, 161, 1178, 4156, 1982, 987, 4423, + 1101, 4157, 631, 3943, 1157, 3198, 2420, 1343, 1241, + 1016, 2239, 2562, 372, 877, 2339, 2501, 1160, 555, 1934, + 911, 3944, 7611, 466, 1170, 169, 1051, 2907, 2688, 3663, + 2474, 2994, 1182, 2011, 2563, 1251, 2626, 7612, 992, + 2340, 3444, 1540, 2721, 1201, 2070, 2401, 1996, 2475, + 7613, 4424, 528, 1922, 2188, 1503, 1873, 1570, 2364, + 3342, 3276, 7614, 557, 1073, 7615, 1827, 3445, 2087, + 2266, 3140, 3039, 3084, 767, 3085, 2786, 4425, 1006, + 4158, 4426, 2341, 1267, 2176, 3664, 3199, 778, 3945, + 3200, 2722, 1597, 2657, 7616, 4427, 7617, 3446, 7618, + 7619, 7620, 3277, 2689, 1433, 3278, 131, 95, 1504, 3946, + 723, 4159, 3141, 1841, 3555, 2758, 2189, 3947, 2027, + 2104, 3665, 7621, 
2995, 3948, 1218, 7622, 3343, 3201, + 3949, 4160, 2576, 248, 1634, 3785, 912, 7623, 2832, 3666, + 3040, 3786, 654, 53, 7624, 2996, 7625, 1688, 4428, 777, + 3447, 1032, 3950, 1425, 7626, 191, 820, 2120, 2833, 971, + 4429, 931, 3202, 135, 664, 783, 3787, 1997, 772, 2908, + 1935, 3951, 3788, 4430, 2909, 3203, 282, 2723, 640, 1372, + 3448, 1127, 922, 325, 3344, 7627, 7628, 711, 2044, 7629, + 7630, 3952, 2219, 2787, 1936, 3953, 3345, 2220, 2251, + 3789, 2300, 7631, 4431, 3790, 1258, 3279, 3954, 3204, + 2138, 2950, 3955, 3956, 7632, 2221, 258, 3205, 4432, + 101, 1227, 7633, 3280, 1755, 7634, 1391, 3281, 7635, + 2910, 2056, 893, 7636, 7637, 7638, 1402, 4161, 2342, + 7639, 7640, 3206, 3556, 7641, 7642, 878, 1325, 1780, + 2788, 4433, 259, 1385, 2577, 744, 1183, 2267, 4434, 7643, + 3957, 2502, 7644, 684, 1024, 4162, 7645, 472, 3557, 3449, + 1165, 3282, 3958, 3959, 322, 2152, 881, 455, 1695, 1152, + 1340, 660, 554, 2153, 4435, 1058, 4436, 4163, 830, 1065, + 3346, 3960, 4437, 1923, 7646, 1703, 1918, 7647, 932, + 2268, 122, 7648, 4438, 947, 677, 7649, 3791, 2627, 297, + 1905, 1924, 2269, 4439, 2317, 3283, 7650, 7651, 4164, + 7652, 4165, 84, 4166, 112, 989, 7653, 547, 1059, 3961, + 701, 3558, 1019, 7654, 4167, 7655, 3450, 942, 639, 457, + 2301, 2451, 993, 2951, 407, 851, 494, 4440, 3347, 927, + 7656, 1237, 7657, 2421, 3348, 573, 4168, 680, 921, 2911, + 1279, 1874, 285, 790, 1448, 1983, 719, 2167, 7658, 7659, + 4441, 3962, 3963, 1649, 7660, 1541, 563, 7661, 1077, + 7662, 3349, 3041, 3451, 511, 2997, 3964, 3965, 3667, + 3966, 1268, 2564, 3350, 3207, 4442, 4443, 7663, 535, + 1048, 1276, 1189, 2912, 2028, 3142, 1438, 1373, 2834, + 2952, 1134, 2012, 7664, 4169, 1238, 2578, 3086, 1259, + 7665, 700, 7666, 2953, 3143, 3668, 4170, 7667, 4171, + 1146, 1875, 1906, 4444, 2601, 3967, 781, 2422, 132, 1589, + 203, 147, 273, 2789, 2402, 898, 1786, 2154, 3968, 3969, + 7668, 3792, 2790, 7669, 7670, 4445, 4446, 7671, 3208, + 7672, 1635, 3793, 965, 7673, 1804, 2690, 1516, 3559, + 1121, 1082, 1329, 
3284, 3970, 1449, 3794, 65, 1128, 2835, + 2913, 2759, 1590, 3795, 7674, 7675, 12, 2658, 45, 976, + 2579, 3144, 4447, 517, 2528, 1013, 1037, 3209, 7676, + 3796, 2836, 7677, 3797, 7678, 3452, 7679, 2602, 614, + 1998, 2318, 3798, 3087, 2724, 2628, 7680, 2580, 4172, + 599, 1269, 7681, 1810, 3669, 7682, 2691, 3088, 759, 1060, + 489, 1805, 3351, 3285, 1358, 7683, 7684, 2386, 1387, + 1215, 2629, 2252, 490, 7685, 7686, 4173, 1759, 2387, + 2343, 7687, 4448, 3799, 1907, 3971, 2630, 1806, 3210, + 4449, 3453, 3286, 2760, 2344, 874, 7688, 7689, 3454, + 3670, 1858, 91, 2914, 3671, 3042, 3800, 4450, 7690, 3145, + 3972, 2659, 7691, 3455, 1202, 1403, 3801, 2954, 2529, + 1517, 2503, 4451, 3456, 2504, 7692, 4452, 7693, 2692, + 1885, 1495, 1731, 3973, 2365, 4453, 7694, 2029, 7695, + 7696, 3974, 2693, 1216, 237, 2581, 4174, 2319, 3975, + 3802, 4454, 4455, 2694, 3560, 3457, 445, 4456, 7697, + 7698, 7699, 7700, 2761, 61, 3976, 3672, 1822, 3977, 7701, + 687, 2045, 935, 925, 405, 2660, 703, 1096, 1859, 2725, + 4457, 3978, 1876, 1367, 2695, 3352, 918, 2105, 1781, + 2476, 334, 3287, 1611, 1093, 4458, 564, 3146, 3458, 3673, + 3353, 945, 2631, 2057, 4459, 7702, 1925, 872, 4175, 7703, + 3459, 2696, 3089, 349, 4176, 3674, 3979, 4460, 3803, + 4177, 3675, 2155, 3980, 4461, 4462, 4178, 4463, 2403, + 2046, 782, 3981, 400, 251, 4179, 1624, 7704, 7705, 277, + 3676, 299, 1265, 476, 1191, 3804, 2121, 4180, 4181, 1109, + 205, 7706, 2582, 1000, 2156, 3561, 1860, 7707, 7708, + 7709, 4464, 7710, 4465, 2565, 107, 2477, 2157, 3982, + 3460, 3147, 7711, 1533, 541, 1301, 158, 753, 4182, 2872, + 3562, 7712, 1696, 370, 1088, 4183, 4466, 3563, 579, 327, + 440, 162, 2240, 269, 1937, 1374, 3461, 968, 3043, 56, + 1396, 3090, 2106, 3288, 3354, 7713, 1926, 2158, 4467, + 2998, 7714, 3564, 7715, 7716, 3677, 4468, 2478, 7717, + 2791, 7718, 1650, 4469, 7719, 2603, 7720, 7721, 3983, + 2661, 3355, 1149, 3356, 3984, 3805, 3985, 7722, 1076, + 49, 7723, 951, 3211, 3289, 3290, 450, 2837, 920, 7724, + 1811, 2792, 2366, 4184, 1908, 
1138, 2367, 3806, 3462, + 7725, 3212, 4470, 1909, 1147, 1518, 2423, 4471, 3807, + 7726, 4472, 2388, 2604, 260, 1795, 3213, 7727, 7728, + 3808, 3291, 708, 7729, 3565, 1704, 7730, 3566, 1351, + 1618, 3357, 2999, 1886, 944, 4185, 3358, 4186, 3044, + 3359, 4187, 7731, 3678, 422, 413, 1714, 3292, 500, 2058, + 2345, 4188, 2479, 7732, 1344, 1910, 954, 7733, 1668, + 7734, 7735, 3986, 2404, 4189, 3567, 3809, 4190, 7736, + 2302, 1318, 2505, 3091, 133, 3092, 2873, 4473, 629, 31, + 2838, 2697, 3810, 4474, 850, 949, 4475, 3987, 2955, 1732, + 2088, 4191, 1496, 1852, 7737, 3988, 620, 3214, 981, 1242, + 3679, 3360, 1619, 3680, 1643, 3293, 2139, 2452, 1970, + 1719, 3463, 2168, 7738, 3215, 7739, 7740, 3361, 1828, + 7741, 1277, 4476, 1565, 2047, 7742, 1636, 3568, 3093, + 7743, 869, 2839, 655, 3811, 3812, 3094, 3989, 3000, 3813, + 1310, 3569, 4477, 7744, 7745, 7746, 1733, 558, 4478, + 3681, 335, 1549, 3045, 1756, 4192, 3682, 1945, 3464, + 1829, 1291, 1192, 470, 2726, 2107, 2793, 913, 1054, 3990, + 7747, 1027, 7748, 3046, 3991, 4479, 982, 2662, 3362, + 3148, 3465, 3216, 3217, 1946, 2794, 7749, 571, 4480, + 7750, 1830, 7751, 3570, 2583, 1523, 2424, 7752, 2089, + 984, 4481, 3683, 1959, 7753, 3684, 852, 923, 2795, 3466, + 3685, 969, 1519, 999, 2048, 2320, 1705, 7754, 3095, 615, + 1662, 151, 597, 3992, 2405, 2321, 1049, 275, 4482, 3686, + 4193, 568, 3687, 3571, 2480, 4194, 3688, 7755, 2425, + 2270, 409, 3218, 7756, 1566, 2874, 3467, 1002, 769, 2840, + 194, 2090, 3149, 3689, 2222, 3294, 4195, 628, 1505, 7757, + 7758, 1763, 2177, 3001, 3993, 521, 1161, 2584, 1787, + 2203, 2406, 4483, 3994, 1625, 4196, 4197, 412, 42, 3096, + 464, 7759, 2632, 4484, 3363, 1760, 1571, 2875, 3468, + 2530, 1219, 2204, 3814, 2633, 2140, 2368, 4485, 4486, + 3295, 1651, 3364, 3572, 7760, 7761, 3573, 2481, 3469, + 7762, 3690, 7763, 7764, 2271, 2091, 460, 7765, 4487, + 7766, 3002, 962, 588, 3574, 289, 3219, 2634, 1116, 52, + 7767, 3047, 1796, 7768, 7769, 7770, 1467, 7771, 1598, + 1143, 3691, 4198, 1984, 1734, 1067, 
4488, 1280, 3365, + 465, 4489, 1572, 510, 7772, 1927, 2241, 1812, 1644, 3575, + 7773, 4490, 3692, 7774, 7775, 2663, 1573, 1534, 7776, + 7777, 4199, 536, 1807, 1761, 3470, 3815, 3150, 2635, + 7778, 7779, 7780, 4491, 3471, 2915, 1911, 2796, 7781, + 3296, 1122, 377, 3220, 7782, 360, 7783, 7784, 4200, 1529, + 551, 7785, 2059, 3693, 1769, 2426, 7786, 2916, 4201, + 3297, 3097, 2322, 2108, 2030, 4492, 1404, 136, 1468, + 1479, 672, 1171, 3221, 2303, 271, 3151, 7787, 2762, 7788, + 2049, 678, 2727, 865, 1947, 4493, 7789, 2013, 3995, 2956, + 7790, 2728, 2223, 1397, 3048, 3694, 4494, 4495, 1735, + 2917, 3366, 3576, 7791, 3816, 509, 2841, 2453, 2876, + 3817, 7792, 7793, 3152, 3153, 4496, 4202, 2531, 4497, + 2304, 1166, 1010, 552, 681, 1887, 7794, 7795, 2957, 2958, + 3996, 1287, 1596, 1861, 3154, 358, 453, 736, 175, 478, + 1117, 905, 1167, 1097, 7796, 1853, 1530, 7797, 1706, + 7798, 2178, 3472, 2287, 3695, 3473, 3577, 4203, 2092, + 4204, 7799, 3367, 1193, 2482, 4205, 1458, 2190, 2205, + 1862, 1888, 1421, 3298, 2918, 3049, 2179, 3474, 595, + 2122, 7800, 3997, 7801, 7802, 4206, 1707, 2636, 223, + 3696, 1359, 751, 3098, 183, 3475, 7803, 2797, 3003, 419, + 2369, 633, 704, 3818, 2389, 241, 7804, 7805, 7806, 838, + 3004, 3697, 2272, 2763, 2454, 3819, 1938, 2050, 3998, + 1309, 3099, 2242, 1181, 7807, 1136, 2206, 3820, 2370, + 1446, 4207, 2305, 4498, 7808, 7809, 4208, 1055, 2605, + 484, 3698, 7810, 3999, 625, 4209, 2273, 3368, 1499, 4210, + 4000, 7811, 4001, 4211, 3222, 2274, 2275, 3476, 7812, + 7813, 2764, 808, 2606, 3699, 3369, 4002, 4212, 3100, + 2532, 526, 3370, 3821, 4213, 955, 7814, 1620, 4214, 2637, + 2427, 7815, 1429, 3700, 1669, 1831, 994, 928, 7816, 3578, + 1260, 7817, 7818, 7819, 1948, 2288, 741, 2919, 1626, + 4215, 2729, 2455, 867, 1184, 362, 3371, 1392, 7820, 7821, + 4003, 4216, 1770, 1736, 3223, 2920, 4499, 4500, 1928, + 2698, 1459, 1158, 7822, 3050, 3372, 2877, 1292, 1929, + 2506, 2842, 3701, 1985, 1187, 2071, 2014, 2607, 4217, + 7823, 2566, 2507, 2169, 3702, 2483, 3299, 
7824, 3703, + 4501, 7825, 7826, 666, 1003, 3005, 1022, 3579, 4218, + 7827, 4502, 1813, 2253, 574, 3822, 1603, 295, 1535, 705, + 3823, 4219, 283, 858, 417, 7828, 7829, 3224, 4503, 4504, + 3051, 1220, 1889, 1046, 2276, 2456, 4004, 1393, 1599, + 689, 2567, 388, 4220, 7830, 2484, 802, 7831, 2798, 3824, + 2060, 1405, 2254, 7832, 4505, 3825, 2109, 1052, 1345, + 3225, 1585, 7833, 809, 7834, 7835, 7836, 575, 2730, 3477, + 956, 1552, 1469, 1144, 2323, 7837, 2324, 1560, 2457, + 3580, 3226, 4005, 616, 2207, 3155, 2180, 2289, 7838, + 1832, 7839, 3478, 4506, 7840, 1319, 3704, 3705, 1211, + 3581, 1023, 3227, 1293, 2799, 7841, 7842, 7843, 3826, + 607, 2306, 3827, 762, 2878, 1439, 4221, 1360, 7844, 1485, + 3052, 7845, 4507, 1038, 4222, 1450, 2061, 2638, 4223, + 1379, 4508, 2585, 7846, 7847, 4224, 1352, 1414, 2325, + 2921, 1172, 7848, 7849, 3828, 3829, 7850, 1797, 1451, + 7851, 7852, 7853, 7854, 2922, 4006, 4007, 2485, 2346, + 411, 4008, 4009, 3582, 3300, 3101, 4509, 1561, 2664, + 1452, 4010, 1375, 7855, 7856, 47, 2959, 316, 7857, 1406, + 1591, 2923, 3156, 7858, 1025, 2141, 3102, 3157, 354, + 2731, 884, 2224, 4225, 2407, 508, 3706, 726, 3583, 996, + 2428, 3584, 729, 7859, 392, 2191, 1453, 4011, 4510, 3707, + 7860, 7861, 2458, 3585, 2608, 1675, 2800, 919, 2347, + 2960, 2348, 1270, 4511, 4012, 73, 7862, 7863, 647, 7864, + 3228, 2843, 2255, 1550, 1346, 3006, 7865, 1332, 883, + 3479, 7866, 7867, 7868, 7869, 3301, 2765, 7870, 1212, + 831, 1347, 4226, 4512, 2326, 3830, 1863, 3053, 720, 3831, + 4513, 4514, 3832, 7871, 4227, 7872, 7873, 4515, 7874, + 7875, 1798, 4516, 3708, 2609, 4517, 3586, 1645, 2371, + 7876, 7877, 2924, 669, 2208, 2665, 2429, 7878, 2879, + 7879, 7880, 1028, 3229, 7881, 4228, 2408, 7882, 2256, + 1353, 7883, 7884, 4518, 3158, 518, 7885, 4013, 7886, + 4229, 1960, 7887, 2142, 4230, 7888, 7889, 3007, 2349, + 2350, 3833, 516, 1833, 1454, 4014, 2699, 4231, 4519, + 2225, 2610, 1971, 1129, 3587, 7890, 2766, 7891, 2961, + 1422, 577, 1470, 3008, 1524, 3373, 7892, 7893, 432, 4232, 
+ 3054, 3480, 7894, 2586, 1455, 2508, 2226, 1972, 1175, + 7895, 1020, 2732, 4015, 3481, 4520, 7896, 2733, 7897, + 1743, 1361, 3055, 3482, 2639, 4016, 4233, 4521, 2290, + 895, 924, 4234, 2170, 331, 2243, 3056, 166, 1627, 3057, + 1098, 7898, 1232, 2880, 2227, 3374, 4522, 657, 403, 1196, + 2372, 542, 3709, 3375, 1600, 4235, 3483, 7899, 4523, + 2767, 3230, 576, 530, 1362, 7900, 4524, 2533, 2666, 3710, + 4017, 7901, 842, 3834, 7902, 2801, 2031, 1014, 4018, + 213, 2700, 3376, 665, 621, 4236, 7903, 3711, 2925, 2430, + 7904, 2431, 3302, 3588, 3377, 7905, 4237, 2534, 4238, + 4525, 3589, 1682, 4239, 3484, 1380, 7906, 724, 2277, + 600, 1670, 7907, 1337, 1233, 4526, 3103, 2244, 7908, + 1621, 4527, 7909, 651, 4240, 7910, 1612, 4241, 2611, + 7911, 2844, 7912, 2734, 2307, 3058, 7913, 716, 2459, + 3059, 174, 1255, 2701, 4019, 3590, 548, 1320, 1398, 728, + 4020, 1574, 7914, 1890, 1197, 3060, 4021, 7915, 3061, + 3062, 3712, 3591, 3713, 747, 7916, 635, 4242, 4528, 7917, + 7918, 7919, 4243, 7920, 7921, 4529, 7922, 3378, 4530, + 2432, 451, 7923, 3714, 2535, 2072, 4244, 2735, 4245, + 4022, 7924, 1764, 4531, 7925, 4246, 350, 7926, 2278, + 2390, 2486, 7927, 4247, 4023, 2245, 1434, 4024, 488, + 4532, 458, 4248, 4025, 3715, 771, 1330, 2391, 3835, 2568, + 3159, 2159, 2409, 1553, 2667, 3160, 4249, 7928, 2487, + 2881, 2612, 1720, 2702, 4250, 3379, 4533, 7929, 2536, + 4251, 7930, 3231, 4252, 2768, 7931, 2015, 2736, 7932, + 1155, 1017, 3716, 3836, 7933, 3303, 2308, 201, 1864, + 4253, 1430, 7934, 4026, 7935, 7936, 7937, 7938, 7939, + 4254, 1604, 7940, 414, 1865, 371, 2587, 4534, 4535, 3485, + 2016, 3104, 4536, 1708, 960, 4255, 887, 389, 2171, 1536, + 1663, 1721, 7941, 2228, 4027, 2351, 2926, 1580, 7942, + 7943, 7944, 1744, 7945, 2537, 4537, 4538, 7946, 4539, + 7947, 2073, 7948, 7949, 3592, 3380, 2882, 4256, 7950, + 4257, 2640, 3381, 2802, 673, 2703, 2460, 709, 3486, 4028, + 3593, 4258, 7951, 1148, 502, 634, 7952, 7953, 1204, 4540, + 3594, 1575, 4541, 2613, 3717, 7954, 3718, 3105, 948, + 3232, 
121, 1745, 3837, 1110, 7955, 4259, 3063, 2509, + 3009, 4029, 3719, 1151, 1771, 3838, 1488, 4030, 1986, + 7956, 2433, 3487, 7957, 7958, 2093, 7959, 4260, 3839, + 1213, 1407, 2803, 531, 2737, 2538, 3233, 1011, 1537, + 7960, 2769, 4261, 3106, 1061, 7961, 3720, 3721, 1866, + 2883, 7962, 2017, 120, 4262, 4263, 2062, 3595, 3234, + 2309, 3840, 2668, 3382, 1954, 4542, 7963, 7964, 3488, + 1047, 2704, 1266, 7965, 1368, 4543, 2845, 649, 3383, + 3841, 2539, 2738, 1102, 2846, 2669, 7966, 7967, 1999, + 7968, 1111, 3596, 2962, 7969, 2488, 3842, 3597, 2804, + 1854, 3384, 3722, 7970, 7971, 3385, 2410, 2884, 3304, + 3235, 3598, 7972, 2569, 7973, 3599, 2805, 4031, 1460, + 856, 7974, 3600, 7975, 2885, 2963, 7976, 2886, 3843, + 7977, 4264, 632, 2510, 875, 3844, 1697, 3845, 2291, 7978, + 7979, 4544, 3010, 1239, 580, 4545, 4265, 7980, 914, 936, + 2074, 1190, 4032, 1039, 2123, 7981, 7982, 7983, 3386, + 1473, 7984, 1354, 4266, 3846, 7985, 2172, 3064, 4033, + 915, 3305, 4267, 4268, 3306, 1605, 1834, 7986, 2739, + 398, 3601, 4269, 3847, 4034, 328, 1912, 2847, 4035, 3848, + 1331, 4270, 3011, 937, 4271, 7987, 3602, 4036, 4037, + 3387, 2160, 4546, 3388, 524, 742, 538, 3065, 1012, 7988, + 7989, 3849, 2461, 7990, 658, 1103, 225, 3850, 7991, 7992, + 4547, 7993, 4548, 7994, 3236, 1243, 7995, 4038, 963, + 2246, 4549, 7996, 2705, 3603, 3161, 7997, 7998, 2588, + 2327, 7999, 4550, 8000, 8001, 8002, 3489, 3307, 957, + 3389, 2540, 2032, 1930, 2927, 2462, 870, 2018, 3604, + 1746, 2770, 2771, 2434, 2463, 8003, 3851, 8004, 3723, + 3107, 3724, 3490, 3390, 3725, 8005, 1179, 3066, 8006, + 3162, 2373, 4272, 3726, 2541, 3163, 3108, 2740, 4039, + 8007, 3391, 1556, 2542, 2292, 977, 2887, 2033, 4040, + 1205, 3392, 8008, 1765, 3393, 3164, 2124, 1271, 1689, + 714, 4551, 3491, 8009, 2328, 3852, 533, 4273, 3605, 2181, + 617, 8010, 2464, 3308, 3492, 2310, 8011, 8012, 3165, + 8013, 8014, 3853, 1987, 618, 427, 2641, 3493, 3394, 8015, + 8016, 1244, 1690, 8017, 2806, 4274, 4552, 8018, 3494, + 8019, 8020, 2279, 1576, 473, 
3606, 4275, 3395, 972, 8021, + 3607, 8022, 3067, 8023, 8024, 4553, 4554, 8025, 3727, + 4041, 4042, 8026, 153, 4555, 356, 8027, 1891, 2888, 4276, + 2143, 408, 803, 2352, 8028, 3854, 8029, 4277, 1646, 2570, + 2511, 4556, 4557, 3855, 8030, 3856, 4278, 8031, 2411, + 3396, 752, 8032, 8033, 1961, 2964, 8034, 746, 3012, 2465, + 8035, 4279, 3728, 698, 4558, 1892, 4280, 3608, 2543, + 4559, 3609, 3857, 8036, 3166, 3397, 8037, 1823, 1302, + 4043, 2706, 3858, 1973, 4281, 8038, 4282, 3167, 823, + 1303, 1288, 1236, 2848, 3495, 4044, 3398, 774, 3859, + 8039, 1581, 4560, 1304, 2849, 3860, 4561, 8040, 2435, + 2161, 1083, 3237, 4283, 4045, 4284, 344, 1173, 288, 2311, + 454, 1683, 8041, 8042, 1461, 4562, 4046, 2589, 8043, + 8044, 4563, 985, 894, 8045, 3399, 3168, 8046, 1913, 2928, + 3729, 1988, 8047, 2110, 1974, 8048, 4047, 8049, 2571, + 1194, 425, 8050, 4564, 3169, 1245, 3730, 4285, 8051, + 8052, 2850, 8053, 636, 4565, 1855, 3861, 760, 1799, 8054, + 4286, 2209, 1508, 4566, 4048, 1893, 1684, 2293, 8055, + 8056, 8057, 4287, 4288, 2210, 479, 8058, 8059, 832, 8060, + 4049, 2489, 8061, 2965, 2490, 3731, 990, 3109, 627, 1814, + 2642, 4289, 1582, 4290, 2125, 2111, 3496, 4567, 8062, + 799, 4291, 3170, 8063, 4568, 2112, 1737, 3013, 1018, + 543, 754, 4292, 3309, 1676, 4569, 4570, 4050, 8064, 1489, + 8065, 3497, 8066, 2614, 2889, 4051, 8067, 8068, 2966, + 8069, 8070, 8071, 8072, 3171, 4571, 4572, 2182, 1722, + 8073, 3238, 3239, 1842, 3610, 1715, 481, 365, 1975, 1856, + 8074, 8075, 1962, 2491, 4573, 8076, 2126, 3611, 3240, + 433, 1894, 2063, 2075, 8077, 602, 2741, 8078, 8079, 8080, + 8081, 8082, 3014, 1628, 3400, 8083, 3172, 4574, 4052, + 2890, 4575, 2512, 8084, 2544, 2772, 8085, 8086, 8087, + 3310, 4576, 2891, 8088, 4577, 8089, 2851, 4578, 4579, + 1221, 2967, 4053, 2513, 8090, 8091, 8092, 1867, 1989, + 8093, 8094, 8095, 1895, 8096, 8097, 4580, 1896, 4054, + 318, 8098, 2094, 4055, 4293, 8099, 8100, 485, 8101, 938, + 3862, 553, 2670, 116, 8102, 3863, 3612, 8103, 3498, 2671, + 2773, 3401, 3311, 
2807, 8104, 3613, 2929, 4056, 1747, + 2930, 2968, 8105, 8106, 207, 8107, 8108, 2672, 4581, + 2514, 8109, 3015, 890, 3614, 3864, 8110, 1877, 3732, + 3402, 8111, 2183, 2353, 3403, 1652, 8112, 8113, 8114, + 941, 2294, 208, 3499, 4057, 2019, 330, 4294, 3865, 2892, + 2492, 3733, 4295, 8115, 8116, 8117, 8118) diff --git a/APPS_UNCOMPILED/lib/chardet/euctwprober.py b/APPS_UNCOMPILED/lib/chardet/euctwprober.py new file mode 100644 index 0000000..99fd49f --- /dev/null +++ b/APPS_UNCOMPILED/lib/chardet/euctwprober.py @@ -0,0 +1,27 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/euctwprober.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 1747 bytes +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCTWDistributionAnalysis +from .mbcssm import EUCTW_SM_MODEL + +class EUCTWProber(MultiByteCharSetProber): + + def __init__(self): + super(EUCTWProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL) + self.distribution_analyzer = EUCTWDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "EUC-TW" + + @property + def language(self): + return "Taiwan" diff --git a/APPS_UNCOMPILED/lib/chardet/gb2312freq.py b/APPS_UNCOMPILED/lib/chardet/gb2312freq.py new file mode 100644 index 0000000..3972dab --- /dev/null +++ b/APPS_UNCOMPILED/lib/chardet/gb2312freq.py @@ -0,0 +1,405 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/gb2312freq.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 20715 bytes 
+GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9 +GB2312_TABLE_SIZE = 3760 +GB2312_CHAR_TO_FREQ_ORDER = (1671, 749, 1443, 2364, 3924, 3807, 2330, 3921, 1704, 3463, + 2691, 1511, 1515, 572, 3191, 2205, 2361, 224, 2558, + 479, 1711, 963, 3162, 440, 4060, 1905, 2966, 2947, 3580, + 2647, 3961, 3842, 2204, 869, 4207, 970, 2678, 5626, + 2944, 2956, 1479, 4048, 514, 3595, 588, 1346, 2820, + 3409, 249, 4088, 1746, 1873, 2047, 1774, 581, 1813, + 358, 1174, 3590, 1014, 1561, 4844, 2245, 670, 1636, + 3112, 889, 1286, 953, 556, 2327, 3060, 1290, 3141, 613, + 185, 3477, 1367, 850, 3820, 1715, 2428, 2642, 2303, + 2732, 3041, 2562, 2648, 3566, 3946, 1349, 388, 3098, + 2091, 1360, 3585, 152, 1687, 1539, 738, 1559, 59, 1232, + 2925, 2267, 1388, 1249, 1741, 1679, 2960, 151, 1566, + 1125, 1352, 4271, 924, 4296, 385, 3166, 4459, 310, 1245, + 2850, 70, 3285, 2729, 3534, 3575, 2398, 3298, 3466, + 1960, 2265, 217, 3647, 864, 1909, 2084, 4401, 2773, + 1010, 3269, 5152, 853, 3051, 3121, 1244, 4251, 1895, + 364, 1499, 1540, 2313, 1180, 3655, 2268, 562, 715, 2417, + 3061, 544, 336, 3768, 2380, 1752, 4075, 950, 280, 2425, + 4382, 183, 2759, 3272, 333, 4297, 2155, 1688, 2356, + 1444, 1039, 4540, 736, 1177, 3349, 2443, 2368, 2144, + 2225, 565, 196, 1482, 3406, 927, 1335, 4147, 692, 878, + 1311, 1653, 3911, 3622, 1378, 4200, 1840, 2969, 3149, + 2126, 1816, 2534, 1546, 2393, 2760, 737, 2494, 13, 447, + 245, 2747, 38, 2765, 2129, 2589, 1079, 606, 360, 471, + 3755, 2890, 404, 848, 699, 1785, 1236, 370, 2221, 1023, + 3746, 2074, 2026, 2023, 2388, 1581, 2119, 812, 1141, + 3091, 2536, 1519, 804, 2053, 406, 1596, 1090, 784, 548, + 4414, 1806, 2264, 2936, 1100, 343, 4114, 5096, 622, + 3358, 743, 3668, 1510, 1626, 5020, 3567, 2513, 3195, + 4115, 5627, 2489, 2991, 24, 2065, 2697, 1087, 2719, + 48, 1634, 315, 68, 985, 2052, 198, 2239, 1347, 1107, + 1439, 597, 2366, 2172, 871, 3307, 919, 2487, 2790, 1867, + 236, 2570, 1413, 3794, 906, 3365, 3381, 1701, 1982, + 1818, 1524, 2924, 1205, 616, 2586, 2072, 2004, 575, + 
253, 3099, 32, 1365, 1182, 197, 1714, 2454, 1201, 554, + 3388, 3224, 2748, 756, 2587, 250, 2567, 1507, 1517, + 3529, 1922, 2761, 2337, 3416, 1961, 1677, 2452, 2238, + 3153, 615, 911, 1506, 1474, 2495, 1265, 1906, 2749, + 3756, 3280, 2161, 898, 2714, 1759, 3450, 2243, 2444, + 563, 26, 3286, 2266, 3769, 3344, 2707, 3677, 611, 1402, + 531, 1028, 2871, 4548, 1375, 261, 2948, 835, 1190, 4134, + 353, 840, 2684, 1900, 3082, 1435, 2109, 1207, 1674, + 329, 1872, 2781, 4055, 2686, 2104, 608, 3318, 2423, + 2957, 2768, 1108, 3739, 3512, 3271, 3985, 2203, 1771, + 3520, 1418, 2054, 1681, 1153, 225, 1627, 2929, 162, + 2050, 2511, 3687, 1954, 124, 1859, 2431, 1684, 3032, + 2894, 585, 4805, 3969, 2869, 2704, 2088, 2032, 2095, + 3656, 2635, 4362, 2209, 256, 518, 2042, 2105, 3777, + 3657, 643, 2298, 1148, 1779, 190, 989, 3544, 414, 11, + 2135, 2063, 2979, 1471, 403, 3678, 126, 770, 1563, 671, + 2499, 3216, 2877, 600, 1179, 307, 2805, 4937, 1268, + 1297, 2694, 252, 4032, 1448, 1494, 1331, 1394, 127, + 2256, 222, 1647, 1035, 1481, 3056, 1915, 1048, 873, + 3651, 210, 33, 1608, 2516, 200, 1520, 415, 102, 0, 3389, + 1287, 817, 91, 3299, 2940, 836, 1814, 549, 2197, 1396, + 1669, 2987, 3582, 2297, 2848, 4528, 1070, 687, 20, 1819, + 121, 1552, 1364, 1461, 1968, 2617, 3540, 2824, 2083, + 177, 948, 4938, 2291, 110, 4549, 2066, 648, 3359, 1755, + 2110, 2114, 4642, 4845, 1693, 3937, 3308, 1257, 1869, + 2123, 208, 1804, 3159, 2992, 2531, 2549, 3361, 2418, + 1350, 2347, 2800, 2568, 1291, 2036, 2680, 72, 842, 1990, + 212, 1233, 1154, 1586, 75, 2027, 3410, 4900, 1823, 1337, + 2710, 2676, 728, 2810, 1522, 3026, 4995, 157, 755, 1050, + 4022, 710, 785, 1936, 2194, 2085, 1406, 2777, 2400, + 150, 1250, 4049, 1206, 807, 1910, 534, 529, 3309, 1721, + 1660, 274, 39, 2827, 661, 2670, 1578, 925, 3248, 3815, + 1094, 4278, 4901, 4252, 41, 1150, 3747, 2572, 2227, + 4501, 3658, 4902, 3813, 3357, 3617, 2884, 2258, 887, + 538, 4187, 3199, 1294, 2439, 3042, 2329, 2343, 2497, + 1255, 107, 543, 1527, 521, 3478, 3568, 
194, 5062, 15, + 961, 3870, 1241, 1192, 2664, 66, 5215, 3260, 2111, 1295, + 1127, 2152, 3805, 4135, 901, 1164, 1976, 398, 1278, + 530, 1460, 748, 904, 1054, 1966, 1426, 53, 2909, 509, + 523, 2279, 1534, 536, 1019, 239, 1685, 460, 2353, 673, + 1065, 2401, 3600, 4298, 2272, 1272, 2363, 284, 1753, + 3679, 4064, 1695, 81, 815, 2677, 2757, 2731, 1386, 859, + 500, 4221, 2190, 2566, 757, 1006, 2519, 2068, 1166, + 1455, 337, 2654, 3203, 1863, 1682, 1914, 3025, 1252, + 1409, 1366, 847, 714, 2834, 2038, 3209, 964, 2970, 1901, + 885, 2553, 1078, 1756, 3049, 301, 1572, 3326, 688, 2130, + 1996, 2429, 1805, 1648, 2930, 3421, 2750, 3652, 3088, + 262, 1158, 1254, 389, 1641, 1812, 526, 1719, 923, 2073, + 1073, 1902, 468, 489, 4625, 1140, 857, 2375, 3070, 3319, + 2863, 380, 116, 1328, 2693, 1161, 2244, 273, 1212, 1884, + 2769, 3011, 1775, 1142, 461, 3066, 1200, 2147, 2212, + 790, 702, 2695, 4222, 1601, 1058, 434, 2338, 5153, 3640, + 67, 2360, 4099, 2502, 618, 3472, 1329, 416, 1132, 830, + 2782, 1807, 2653, 3211, 3510, 1662, 192, 2124, 296, + 3979, 1739, 1611, 3684, 23, 118, 324, 446, 1239, 1225, + 293, 2520, 3814, 3795, 2535, 3116, 17, 1074, 467, 2692, + 2201, 387, 2922, 45, 1326, 3055, 1645, 3659, 2817, 958, + 243, 1903, 2320, 1339, 2825, 1784, 3289, 356, 576, 865, + 2315, 2381, 3377, 3916, 1088, 3122, 1713, 1655, 935, + 628, 4689, 1034, 1327, 441, 800, 720, 894, 1979, 2183, + 1528, 5289, 2702, 1071, 4046, 3572, 2399, 1571, 3281, + 79, 761, 1103, 327, 134, 758, 1899, 1371, 1615, 879, + 442, 215, 2605, 2579, 173, 2048, 2485, 1057, 2975, 3317, + 1097, 2253, 3801, 4263, 1403, 1650, 2946, 814, 4968, + 3487, 1548, 2644, 1567, 1285, 2, 295, 2636, 97, 946, + 3576, 832, 141, 4257, 3273, 760, 3821, 3521, 3156, 2607, + 949, 1024, 1733, 1516, 1803, 1920, 2125, 2283, 2665, + 3180, 1501, 2064, 3560, 2171, 1592, 803, 3518, 1416, + 732, 3897, 4258, 1363, 1362, 2458, 119, 1427, 602, 1525, + 2608, 1605, 1639, 3175, 694, 3064, 10, 465, 76, 2000, + 4846, 4208, 444, 3781, 1619, 3353, 2206, 1273, 3796, 
+ 740, 2483, 320, 1723, 2377, 3660, 2619, 1359, 1137, + 1762, 1724, 2345, 2842, 1850, 1862, 912, 821, 1866, + 612, 2625, 1735, 2573, 3369, 1093, 844, 89, 937, 930, + 1424, 3564, 2413, 2972, 1004, 3046, 3019, 2011, 711, + 3171, 1452, 4178, 428, 801, 1943, 432, 445, 2811, 206, + 4136, 1472, 730, 349, 73, 397, 2802, 2547, 998, 1637, + 1167, 789, 396, 3217, 154, 1218, 716, 1120, 1780, 2819, + 4826, 1931, 3334, 3762, 2139, 1215, 2627, 552, 3664, + 3628, 3232, 1405, 2383, 3111, 1356, 2652, 3577, 3320, + 3101, 1703, 640, 1045, 1370, 1246, 4996, 371, 1575, + 2436, 1621, 2210, 984, 4033, 1734, 2638, 16, 4529, 663, + 2755, 3255, 1451, 3917, 2257, 1253, 1955, 2234, 1263, + 2951, 214, 1229, 617, 485, 359, 1831, 1969, 473, 2310, + 750, 2058, 165, 80, 2864, 2419, 361, 4344, 2416, 2479, + 1134, 796, 3726, 1266, 2943, 860, 2715, 938, 390, 2734, + 1313, 1384, 248, 202, 877, 1064, 2854, 522, 3907, 279, + 1602, 297, 2357, 395, 3740, 137, 2075, 944, 4089, 2584, + 1267, 3802, 62, 1533, 2285, 178, 176, 780, 2440, 201, + 3707, 590, 478, 1560, 4354, 2117, 1075, 30, 74, 4643, + 4004, 1635, 1441, 2745, 776, 2596, 238, 1077, 1692, + 1912, 2844, 605, 499, 1742, 3947, 241, 3053, 980, 1749, + 936, 2640, 4511, 2582, 515, 1543, 2162, 5322, 2892, + 2993, 890, 2148, 1924, 665, 1827, 3581, 1032, 968, 3163, + 339, 1044, 1896, 270, 583, 1791, 1720, 4367, 1194, 3488, + 3669, 43, 2523, 1657, 163, 2167, 290, 1209, 1622, 3378, + 550, 634, 2508, 2510, 695, 2634, 2384, 2512, 1476, 1414, + 220, 1469, 2341, 2138, 2852, 3183, 2900, 4939, 2865, + 3502, 1211, 3680, 854, 3227, 1299, 2976, 3172, 186, + 2998, 1459, 443, 1067, 3251, 1495, 321, 1932, 3054, + 909, 753, 1410, 1828, 436, 2441, 1119, 1587, 3164, 2186, + 1258, 227, 231, 1425, 1890, 3200, 3942, 247, 959, 725, + 5254, 2741, 577, 2158, 2079, 929, 120, 174, 838, 2813, + 591, 1115, 417, 2024, 40, 3240, 1536, 1037, 291, 4151, + 2354, 632, 1298, 2406, 2500, 3535, 1825, 1846, 3451, + 205, 1171, 345, 4238, 18, 1163, 811, 685, 2208, 1217, + 425, 1312, 1508, 1175, 
4308, 2552, 1033, 587, 1381, + 3059, 2984, 3482, 340, 1316, 4023, 3972, 792, 3176, + 519, 777, 4690, 918, 933, 4130, 2981, 3741, 90, 3360, + 2911, 2200, 5184, 4550, 609, 3079, 2030, 272, 3379, + 2736, 363, 3881, 1130, 1447, 286, 779, 357, 1169, 3350, + 3137, 1630, 1220, 2687, 2391, 747, 1277, 3688, 2618, + 2682, 2601, 1156, 3196, 5290, 4034, 3102, 1689, 3596, + 3128, 874, 219, 2783, 798, 508, 1843, 2461, 269, 1658, + 1776, 1392, 1913, 2983, 3287, 2866, 2159, 2372, 829, + 4076, 46, 4253, 2873, 1889, 1894, 915, 1834, 1631, 2181, + 2318, 298, 664, 2818, 3555, 2735, 954, 3228, 3117, 527, + 3511, 2173, 681, 2712, 3033, 2247, 2346, 3467, 1652, + 155, 2164, 3382, 113, 1994, 450, 899, 494, 994, 1237, + 2958, 1875, 2336, 1926, 3727, 545, 1577, 1550, 633, + 3473, 204, 1305, 3072, 2410, 1956, 2471, 707, 2134, + 841, 2195, 2196, 2663, 3843, 1026, 4940, 990, 3252, + 4997, 368, 1092, 437, 3212, 3258, 1933, 1829, 675, 2977, + 2893, 412, 943, 3723, 4644, 3294, 3283, 2230, 2373, + 5154, 2389, 2241, 2661, 2323, 1404, 2524, 593, 787, + 677, 3008, 1275, 2059, 438, 2709, 2609, 2240, 2269, + 2246, 1446, 36, 1568, 1373, 3892, 1574, 2301, 1456, + 3962, 693, 2276, 5216, 2035, 1143, 2720, 1919, 1797, + 1811, 2763, 4137, 2597, 1830, 1699, 1488, 1198, 2090, + 424, 1694, 312, 3634, 3390, 4179, 3335, 2252, 1214, + 561, 1059, 3243, 2295, 2561, 975, 5155, 2321, 2751, + 3772, 472, 1537, 3282, 3398, 1047, 2077, 2348, 2878, + 1323, 3340, 3076, 690, 2906, 51, 369, 170, 3541, 1060, + 2187, 2688, 3670, 2541, 1083, 1683, 928, 3918, 459, + 109, 4427, 599, 3744, 4286, 143, 2101, 2730, 2490, 82, + 1588, 3036, 2121, 281, 1860, 477, 4035, 1238, 2812, + 3020, 2716, 3312, 1530, 2188, 2055, 1317, 843, 636, + 1808, 1173, 3495, 649, 181, 1002, 147, 3641, 1159, 2414, + 3750, 2289, 2795, 813, 3123, 2610, 1136, 4368, 5, 3391, + 4541, 2174, 420, 429, 1728, 754, 1228, 2115, 2219, 347, + 2223, 2733, 735, 1518, 3003, 2355, 3134, 1764, 3948, + 3329, 1888, 2424, 1001, 1234, 1972, 3321, 3363, 1672, + 1021, 1450, 1584, 226, 
765, 655, 2526, 3404, 3244, 2302, + 3665, 731, 594, 2184, 319, 1576, 621, 658, 2656, 4299, + 2099, 3864, 1279, 2071, 2598, 2739, 795, 3086, 3699, + 3908, 1707, 2352, 2402, 1382, 3136, 2475, 1465, 4847, + 3496, 3865, 1085, 3004, 2591, 1084, 213, 2287, 1963, + 3565, 2250, 822, 793, 4574, 3187, 1772, 1789, 3050, + 595, 1484, 1959, 2770, 1080, 2650, 456, 422, 2996, 940, + 3322, 4328, 4345, 3092, 2742, 965, 2784, 739, 4124, + 952, 1358, 2498, 2949, 2565, 332, 2698, 2378, 660, 2260, + 2473, 4194, 3856, 2919, 535, 1260, 2651, 1208, 1428, + 1300, 1949, 1303, 2942, 433, 2455, 2450, 1251, 1946, + 614, 1269, 641, 1306, 1810, 2737, 3078, 2912, 564, 2365, + 1419, 1415, 1497, 4460, 2367, 2185, 1379, 3005, 1307, + 3218, 2175, 1897, 3063, 682, 1157, 4040, 4005, 1712, + 1160, 1941, 1399, 394, 402, 2952, 1573, 1151, 2986, + 2404, 862, 299, 2033, 1489, 3006, 346, 171, 2886, 3401, + 1726, 2932, 168, 2533, 47, 2507, 1030, 3735, 1145, 3370, + 1395, 1318, 1579, 3609, 4560, 2857, 4116, 1457, 2529, + 1965, 504, 1036, 2690, 2988, 2405, 745, 5871, 849, 2397, + 2056, 3081, 863, 2359, 3857, 2096, 99, 1397, 1769, 2300, + 4428, 1643, 3455, 1978, 1757, 3718, 1440, 35, 4879, + 3742, 1296, 4228, 2280, 160, 5063, 1599, 2013, 166, + 520, 3479, 1646, 3345, 3012, 490, 1937, 1545, 1264, + 2182, 2505, 1096, 1188, 1369, 1436, 2421, 1667, 2792, + 2460, 1270, 2122, 727, 3167, 2143, 806, 1706, 1012, + 1800, 3037, 960, 2218, 1882, 805, 139, 2456, 1139, 1521, + 851, 1052, 3093, 3089, 342, 2039, 744, 5097, 1468, 1502, + 1585, 2087, 223, 939, 326, 2140, 2577, 892, 2481, 1623, + 4077, 982, 3708, 135, 2131, 87, 2503, 3114, 2326, 1106, + 876, 1616, 547, 2997, 2831, 2093, 3441, 4530, 4314, + 9, 3256, 4229, 4148, 659, 1462, 1986, 1710, 2046, 2913, + 2231, 4090, 4880, 5255, 3392, 3274, 1368, 3689, 4645, + 1477, 705, 3384, 3635, 1068, 1529, 2941, 1458, 3782, + 1509, 100, 1656, 2548, 718, 2339, 408, 1590, 2780, 3548, + 1838, 4117, 3719, 1345, 3530, 717, 3442, 2778, 3220, + 2898, 1892, 4590, 3614, 3371, 2043, 1998, 1224, 
3483, + 891, 635, 584, 2559, 3355, 733, 1766, 1729, 1172, 3789, + 1891, 2307, 781, 2982, 2271, 1957, 1580, 5773, 2633, + 2005, 4195, 3097, 1535, 3213, 1189, 1934, 5693, 3262, + 586, 3118, 1324, 1598, 517, 1564, 2217, 1868, 1893, + 4445, 3728, 2703, 3139, 1526, 1787, 1992, 3882, 2875, + 1549, 1199, 1056, 2224, 1904, 2711, 5098, 4287, 338, + 1993, 3129, 3489, 2689, 1809, 2815, 1997, 957, 1855, + 3898, 2550, 3275, 3057, 1105, 1319, 627, 1505, 1911, + 1883, 3526, 698, 3629, 3456, 1833, 1431, 746, 77, 1261, + 2017, 2296, 1977, 1885, 125, 1334, 1600, 525, 1798, + 1109, 2222, 1470, 1945, 559, 2236, 1186, 3443, 2476, + 1929, 1411, 2411, 3135, 1777, 3372, 2621, 1841, 1613, + 3229, 668, 1430, 1839, 2643, 2916, 195, 1989, 2671, + 2358, 1387, 629, 3205, 2293, 5256, 4439, 123, 1310, + 888, 1879, 4300, 3021, 3605, 1003, 1162, 3192, 2910, + 2010, 140, 2395, 2859, 55, 1082, 2012, 2901, 662, 419, + 2081, 1438, 680, 2774, 4654, 3912, 1620, 1731, 1625, + 5035, 4065, 2328, 512, 1344, 802, 5443, 2163, 2311, + 2537, 524, 3399, 98, 1155, 2103, 1918, 2606, 3925, 2816, + 1393, 2465, 1504, 3773, 2177, 3963, 1478, 4346, 180, + 1113, 4655, 3461, 2028, 1698, 833, 2696, 1235, 1322, + 1594, 4408, 3623, 3013, 3225, 2040, 3022, 541, 2881, + 607, 3632, 2029, 1665, 1219, 639, 1385, 1686, 1099, + 2803, 3231, 1938, 3188, 2858, 427, 676, 2772, 1168, + 2025, 454, 3253, 2486, 3556, 230, 1950, 580, 791, 1991, + 1280, 1086, 1974, 2034, 630, 257, 3338, 2788, 4903, + 1017, 86, 4790, 966, 2789, 1995, 1696, 1131, 259, 3095, + 4188, 1308, 179, 1463, 5257, 289, 4107, 1248, 42, 3413, + 1725, 2288, 896, 1947, 774, 4474, 4254, 604, 3430, 4264, + 392, 2514, 2588, 452, 237, 1408, 3018, 988, 4531, 1970, + 3034, 3310, 540, 2370, 1562, 1288, 2990, 502, 4765, + 1147, 4, 1853, 2708, 207, 294, 2814, 4078, 2902, 2509, + 684, 34, 3105, 3532, 2551, 644, 709, 2801, 2344, 573, + 1727, 3573, 3557, 2021, 1081, 3100, 4315, 2100, 3681, + 199, 2263, 1837, 2385, 146, 3484, 1195, 2776, 3949, + 997, 1939, 3973, 1008, 1091, 1202, 1962, 
1847, 1149, + 4209, 5444, 1076, 493, 117, 5400, 2521, 972, 1490, 2934, + 1796, 4542, 2374, 1512, 2933, 2657, 413, 2888, 1135, + 2762, 2314, 2156, 1355, 2369, 766, 2007, 2527, 2170, + 3124, 2491, 2593, 2632, 4757, 2437, 234, 3125, 3591, + 1898, 1750, 1376, 1942, 3468, 3138, 570, 2127, 2145, + 3276, 4131, 962, 132, 1445, 4196, 19, 941, 3624, 3480, + 3366, 1973, 1374, 4461, 3431, 2629, 283, 2415, 2275, + 808, 2887, 3620, 2112, 2563, 1353, 3610, 955, 1089, + 3103, 1053, 96, 88, 4097, 823, 3808, 1583, 399, 292, + 4091, 3313, 421, 1128, 642, 4006, 903, 2539, 1877, 2082, + 596, 29, 4066, 1790, 722, 2157, 130, 995, 1569, 769, + 1485, 464, 513, 2213, 288, 1923, 1101, 2453, 4316, 133, + 486, 2445, 50, 625, 487, 2207, 57, 423, 481, 2962, 159, + 3729, 1558, 491, 303, 482, 501, 240, 2837, 112, 3648, + 2392, 1783, 362, 8, 3433, 3422, 610, 2793, 3277, 1390, + 1284, 1654, 21, 3823, 734, 367, 623, 193, 287, 374, + 1009, 1483, 816, 476, 313, 2255, 2340, 1262, 2150, 2899, + 1146, 2581, 782, 2116, 1659, 2018, 1880, 255, 3586, + 3314, 1110, 2867, 2137, 2564, 986, 2767, 5185, 2006, + 650, 158, 926, 762, 881, 3157, 2717, 2362, 3587, 306, + 3690, 3245, 1542, 3077, 2427, 1691, 2478, 2118, 2985, + 3490, 2438, 539, 2305, 983, 129, 1754, 355, 4201, 2386, + 827, 2923, 104, 1773, 2838, 2771, 411, 2905, 3919, 376, + 767, 122, 1114, 828, 2422, 1817, 3506, 266, 3460, 1007, + 1609, 4998, 945, 2612, 4429, 2274, 726, 1247, 1964, + 2914, 2199, 2070, 4002, 4108, 657, 3323, 1422, 579, + 455, 2764, 4737, 1222, 2895, 1670, 824, 1223, 1487, + 2525, 558, 861, 3080, 598, 2659, 2515, 1967, 752, 2583, + 2376, 2214, 4180, 977, 704, 2464, 4999, 2622, 4109, + 1210, 2961, 819, 1541, 142, 2284, 44, 418, 457, 1126, + 3730, 4347, 4626, 1644, 1876, 3671, 1864, 302, 1063, + 5694, 624, 723, 1984, 3745, 1314, 1676, 2488, 1610, + 1449, 3558, 3569, 2166, 2098, 409, 1011, 2325, 3704, + 2306, 818, 1732, 1383, 1824, 1844, 3757, 999, 2705, + 3497, 1216, 1423, 2683, 2426, 2954, 2501, 2726, 2229, + 1475, 2554, 5064, 1971, 1794, 
1666, 2014, 1343, 783, + 724, 191, 2434, 1354, 2220, 5065, 1763, 2752, 2472, + 4152, 131, 175, 2885, 3434, 92, 1466, 4920, 2616, 3871, + 3872, 3866, 128, 1551, 1632, 669, 1854, 3682, 4691, + 4125, 1230, 188, 2973, 3290, 1302, 1213, 560, 3266, + 917, 763, 3909, 3249, 1760, 868, 1958, 764, 1782, 2097, + 145, 2277, 3774, 4462, 64, 1491, 3062, 971, 2132, 3606, + 2442, 221, 1226, 1617, 218, 323, 1185, 3207, 3147, 571, + 619, 1473, 1005, 1744, 2281, 449, 1887, 2396, 3685, + 275, 375, 3816, 1743, 3844, 3731, 845, 1983, 2350, 4210, + 1377, 773, 967, 3499, 3052, 3743, 2725, 4007, 1697, + 1022, 3943, 1464, 3264, 2855, 2722, 1952, 1029, 2839, + 2467, 84, 4383, 2215, 820, 1391, 2015, 2448, 3672, 377, + 1948, 2168, 797, 2545, 3536, 2578, 2645, 94, 2874, 1678, + 405, 1259, 3071, 771, 546, 1315, 470, 1243, 3083, 895, + 2468, 981, 969, 2037, 846, 4181, 653, 1276, 2928, 14, + 2594, 557, 3007, 2474, 156, 902, 1338, 1740, 2574, 537, + 2518, 973, 2282, 2216, 2433, 1928, 138, 2903, 1293, + 2631, 1612, 646, 3457, 839, 2935, 111, 496, 2191, 2847, + 589, 3186, 149, 3994, 2060, 4031, 2641, 4067, 3145, + 1870, 37, 3597, 2136, 1025, 2051, 3009, 3383, 3549, + 1121, 1016, 3261, 1301, 251, 2446, 2599, 2153, 872, + 3246, 637, 334, 3705, 831, 884, 921, 3065, 3140, 4092, + 2198, 1944, 246, 2964, 108, 2045, 1152, 1921, 2308, + 1031, 203, 3173, 4170, 1907, 3890, 810, 1401, 2003, + 1690, 506, 647, 1242, 2828, 1761, 1649, 3208, 2249, + 1589, 3709, 2931, 5156, 1708, 498, 666, 2613, 834, 3817, + 1231, 184, 2851, 1124, 883, 3197, 2261, 3710, 1765, + 1553, 2658, 1178, 2639, 2351, 93, 1193, 942, 2538, 2141, + 4402, 235, 1821, 870, 1591, 2192, 1709, 1871, 3341, + 1618, 4126, 2595, 2334, 603, 651, 69, 701, 268, 2662, + 3411, 2555, 1380, 1606, 503, 448, 254, 2371, 2646, 574, + 1187, 2309, 1770, 322, 2235, 1292, 1801, 305, 566, 1133, + 229, 2067, 2057, 706, 167, 483, 2002, 2672, 3295, 1820, + 3561, 3067, 316, 378, 2746, 3452, 1112, 136, 1981, 507, + 1651, 2917, 1117, 285, 4591, 182, 2580, 3522, 1304, + 335, 
3303, 1835, 2504, 1795, 1792, 2248, 674, 1018, + 2106, 2449, 1857, 2292, 2845, 976, 3047, 1781, 2600, + 2727, 1389, 1281, 52, 3152, 153, 265, 3950, 672, 3485, + 3951, 4463, 430, 1183, 365, 278, 2169, 27, 1407, 1336, + 2304, 209, 1340, 1730, 2202, 1852, 2403, 2883, 979, + 1737, 1062, 631, 2829, 2542, 3876, 2592, 825, 2086, + 2226, 3048, 3625, 352, 1417, 3724, 542, 991, 431, 1351, + 3938, 1861, 2294, 826, 1361, 2927, 3142, 3503, 1738, + 463, 2462, 2723, 582, 1916, 1595, 2808, 400, 3845, 3891, + 2868, 3621, 2254, 58, 2492, 1123, 910, 2160, 2614, 1372, + 1603, 1196, 1072, 3385, 1700, 3267, 1980, 696, 480, + 2430, 920, 799, 1570, 2920, 1951, 2041, 4047, 2540, + 1321, 4223, 2469, 3562, 2228, 1271, 2602, 401, 2833, + 3351, 2575, 5157, 907, 2312, 1256, 410, 263, 3507, 1582, + 996, 678, 1849, 2316, 1480, 908, 3545, 2237, 703, 2322, + 667, 1826, 2849, 1531, 2604, 2999, 2407, 3146, 2151, + 2630, 1786, 3711, 469, 3542, 497, 3899, 2409, 858, 837, + 4446, 3393, 1274, 786, 620, 1845, 2001, 3311, 484, 308, + 3367, 1204, 1815, 3691, 2332, 1532, 2557, 1842, 2020, + 2724, 1927, 2333, 4440, 567, 22, 1673, 2728, 4475, 1987, + 1858, 1144, 1597, 101, 1832, 3601, 12, 974, 3783, 4391, + 951, 1412, 1, 3720, 453, 4608, 4041, 528, 1041, 1027, + 3230, 2628, 1129, 875, 1051, 3291, 1203, 2262, 1069, + 2860, 2799, 2149, 2615, 3278, 144, 1758, 3040, 31, 475, + 1680, 366, 2685, 3184, 311, 1642, 4008, 2466, 5036, + 1593, 1493, 2809, 216, 1420, 1668, 233, 304, 2128, 3284, + 232, 1429, 1768, 1040, 2008, 3407, 2740, 2967, 2543, + 242, 2133, 778, 1565, 2022, 2620, 505, 2189, 2756, 1098, + 2273, 372, 1614, 708, 553, 2846, 2094, 2278, 169, 3626, + 2835, 4161, 228, 2674, 3165, 809, 1454, 1309, 466, 1705, + 1095, 900, 3423, 880, 2667, 3751, 5258, 2317, 3109, + 2571, 4317, 2766, 1503, 1342, 866, 4447, 1118, 63, 2076, + 314, 1881, 1348, 1061, 172, 978, 3515, 1747, 532, 511, + 3970, 6, 601, 905, 2699, 3300, 1751, 276, 1467, 3725, + 2668, 65, 4239, 2544, 2779, 2556, 1604, 578, 2451, 1802, + 992, 2331, 2624, 
1320, 3446, 713, 1513, 1013, 103, 2786, + 2447, 1661, 886, 1702, 916, 654, 3574, 2031, 1556, 751, + 2178, 2821, 2179, 1498, 1538, 2176, 271, 914, 2251, + 2080, 1325, 638, 1953, 2937, 3877, 2432, 2754, 95, 3265, + 1716, 260, 1227, 4083, 775, 106, 1357, 3254, 426, 1607, + 555, 2480, 772, 1985, 244, 2546, 474, 495, 1046, 2611, + 1851, 2061, 71, 2089, 1675, 2590, 742, 3758, 2843, 3222, + 1433, 267, 2180, 2576, 2826, 2233, 2092, 3913, 2435, + 956, 1745, 3075, 856, 2113, 1116, 451, 3, 1988, 2896, + 1398, 993, 2463, 1878, 2049, 1341, 2718, 2721, 2870, + 2108, 712, 2904, 4363, 2753, 2324, 277, 2872, 2349, + 2649, 384, 987, 435, 691, 3000, 922, 164, 3939, 652, + 1500, 1184, 4153, 2482, 3373, 2165, 4848, 2335, 3775, + 3508, 3154, 2806, 2830, 1554, 2102, 1664, 2530, 1434, + 2408, 893, 1547, 2623, 3447, 2832, 2242, 2532, 3169, + 2856, 3223, 2078, 49, 3770, 3469, 462, 318, 656, 2259, + 3250, 3069, 679, 1629, 2758, 344, 1138, 1104, 3120, + 1836, 1283, 3115, 2154, 1437, 4448, 934, 759, 1999, + 794, 2862, 1038, 533, 2560, 1722, 2342, 855, 2626, 1197, + 1663, 4476, 3127, 85, 4240, 2528, 25, 1111, 1181, 3673, + 407, 3470, 4561, 2679, 2713, 768, 1925, 2841, 3986, + 1544, 1165, 932, 373, 1240, 2146, 1930, 2673, 721, 4766, + 354, 4333, 391, 2963, 187, 61, 3364, 1442, 1102, 330, + 1940, 1767, 341, 3809, 4118, 393, 2496, 2062, 2211, + 105, 331, 300, 439, 913, 1332, 626, 379, 3304, 1557, + 328, 689, 3952, 309, 1555, 931, 317, 2517, 3027, 325, + 569, 686, 2107, 3084, 60, 1042, 1333, 2794, 264, 3177, + 4014, 1628, 258, 3712, 7, 4464, 1176, 1043, 1778, 683, + 114, 1975, 78, 1492, 383, 1886, 510, 386, 645, 5291, + 2891, 2069, 3305, 4138, 3867, 2939, 2603, 2493, 1935, + 1066, 1848, 3588, 1015, 1282, 1289, 4609, 697, 1453, + 3044, 2666, 3611, 1856, 2412, 54, 719, 1330, 568, 3778, + 2459, 1748, 788, 492, 551, 1191, 1000, 488, 3394, 3763, + 282, 1799, 348, 2016, 1523, 3155, 2390, 1049, 382, 2019, + 1788, 1170, 729, 2968, 3523, 897, 3926, 2785, 2938, + 3292, 350, 2319, 3238, 1718, 1717, 2655, 
3453, 3143, + 4465, 161, 2889, 2980, 2009, 1421, 56, 1908, 1640, 2387, + 2232, 1917, 1874, 2477, 4921, 148, 83, 3438, 592, 4245, + 2882, 1822, 1055, 741, 115, 1496, 1624, 381, 1638, 4592, + 1020, 516, 3214, 458, 947, 4575, 1432, 211, 1514, 2926, + 1865, 2142, 189, 852, 1221, 1400, 1486, 882, 2299, 4036, + 351, 28, 1122, 700, 6479, 6480, 6481, 6482, 6483) diff --git a/APPS_UNCOMPILED/lib/chardet/gb2312prober.py b/APPS_UNCOMPILED/lib/chardet/gb2312prober.py new file mode 100644 index 0000000..3a2e5cb --- /dev/null +++ b/APPS_UNCOMPILED/lib/chardet/gb2312prober.py @@ -0,0 +1,27 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/gb2312prober.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 1754 bytes +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import GB2312DistributionAnalysis +from .mbcssm import GB2312_SM_MODEL + +class GB2312Prober(MultiByteCharSetProber): + + def __init__(self): + super(GB2312Prober, self).__init__() + self.coding_sm = CodingStateMachine(GB2312_SM_MODEL) + self.distribution_analyzer = GB2312DistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "GB2312" + + @property + def language(self): + return "Chinese" diff --git a/APPS_UNCOMPILED/lib/chardet/hebrewprober.py b/APPS_UNCOMPILED/lib/chardet/hebrewprober.py new file mode 100644 index 0000000..eb1619d --- /dev/null +++ b/APPS_UNCOMPILED/lib/chardet/hebrewprober.py @@ -0,0 +1,101 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/hebrewprober.py +# Compiled at: 2024-04-18 
03:12:56 +# Size of source mod 2**32: 13838 bytes +from .charsetprober import CharSetProber +from .enums import ProbingState + +class HebrewProber(CharSetProber): + FINAL_KAF = 234 + NORMAL_KAF = 235 + FINAL_MEM = 237 + NORMAL_MEM = 238 + FINAL_NUN = 239 + NORMAL_NUN = 240 + FINAL_PE = 243 + NORMAL_PE = 244 + FINAL_TSADI = 245 + NORMAL_TSADI = 246 + MIN_FINAL_CHAR_DISTANCE = 5 + MIN_MODEL_DISTANCE = 0.01 + VISUAL_HEBREW_NAME = "ISO-8859-8" + LOGICAL_HEBREW_NAME = "windows-1255" + + def __init__(self): + super(HebrewProber, self).__init__() + self._final_char_logical_score = None + self._final_char_visual_score = None + self._prev = None + self._before_prev = None + self._logical_prober = None + self._visual_prober = None + self.reset() + + def reset(self): + self._final_char_logical_score = 0 + self._final_char_visual_score = 0 + self._prev = " " + self._before_prev = " " + + def set_model_probers(self, logicalProber, visualProber): + self._logical_prober = logicalProber + self._visual_prober = visualProber + + def is_final(self, c): + return c in [self.FINAL_KAF, self.FINAL_MEM, self.FINAL_NUN, + self.FINAL_PE, self.FINAL_TSADI] + + def is_non_final(self, c): + return c in [self.NORMAL_KAF, self.NORMAL_MEM, + self.NORMAL_NUN, self.NORMAL_PE] + + def feed(self, byte_str): + if self.state == ProbingState.NOT_ME: + return ProbingState.NOT_ME + byte_str = self.filter_high_byte_only(byte_str) + for cur in byte_str: + if cur == " ": + if self._before_prev != " ": + if self.is_final(self._prev): + self._final_char_logical_score += 1 + elif self.is_non_final(self._prev): + self._final_char_visual_score += 1 + else: + if self._before_prev == " ": + if self.is_final(self._prev): + if cur != " ": + self._final_char_visual_score += 1 + self._before_prev = self._prev + self._prev = cur + + return ProbingState.DETECTING + + @property + def charset_name(self): + finalsub = self._final_char_logical_score - self._final_char_visual_score + if finalsub >= 
self.MIN_FINAL_CHAR_DISTANCE: + return self.LOGICAL_HEBREW_NAME + if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE: + return self.VISUAL_HEBREW_NAME + modelsub = self._logical_prober.get_confidence() - self._visual_prober.get_confidence() + if modelsub > self.MIN_MODEL_DISTANCE: + return self.LOGICAL_HEBREW_NAME + if modelsub < -self.MIN_MODEL_DISTANCE: + return self.VISUAL_HEBREW_NAME + if finalsub < 0.0: + return self.VISUAL_HEBREW_NAME + return self.LOGICAL_HEBREW_NAME + + @property + def language(self): + return "Hebrew" + + @property + def state(self): + if self._logical_prober.state == ProbingState.NOT_ME: + if self._visual_prober.state == ProbingState.NOT_ME: + return ProbingState.NOT_ME + return ProbingState.DETECTING diff --git a/APPS_UNCOMPILED/lib/chardet/jisfreq.py b/APPS_UNCOMPILED/lib/chardet/jisfreq.py new file mode 100644 index 0000000..5c81f04 --- /dev/null +++ b/APPS_UNCOMPILED/lib/chardet/jisfreq.py @@ -0,0 +1,442 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/jisfreq.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 25777 bytes +JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0 +JIS_TABLE_SIZE = 4368 +JIS_CHAR_TO_FREQ_ORDER = (40, 1, 6, 182, 152, 180, 295, 2127, 285, 381, 3295, 4304, + 3068, 4606, 3165, 3510, 3511, 1822, 2785, 4607, 1193, 2226, + 5070, 4608, 171, 2996, 1247, 18, 179, 5071, 856, 1661, + 1262, 5072, 619, 127, 3431, 3512, 3230, 1899, 1700, 232, + 228, 1294, 1298, 284, 283, 2041, 2042, 1061, 1062, 48, + 49, 44, 45, 433, 434, 1040, 1041, 996, 787, 2997, 1255, + 4305, 2108, 4609, 1684, 1648, 5073, 5074, 5075, 5076, 5077, + 5078, 3687, 5079, 4610, 5080, 3927, 3928, 5081, 3296, 3432, + 290, 2285, 1471, 2187, 5082, 2580, 2825, 1303, 2140, 1739, + 1445, 2691, 3375, 1691, 3297, 4306, 4307, 4611, 452, 3376, + 1182, 2713, 3688, 3069, 4308, 
5083, 5084, 5085, 5086, 5087, + 5088, 5089, 5090, 5091, 5092, 5093, 5094, 5095, 5096, 5097, + 5098, 5099, 5100, 5101, 5102, 5103, 5104, 5105, 5106, 5107, + 5108, 5109, 5110, 5111, 5112, 4097, 5113, 5114, 5115, 5116, + 5117, 5118, 5119, 5120, 5121, 5122, 5123, 5124, 5125, 5126, + 5127, 5128, 5129, 5130, 5131, 5132, 5133, 5134, 5135, 5136, + 5137, 5138, 5139, 5140, 5141, 5142, 5143, 5144, 5145, 5146, + 5147, 5148, 5149, 5150, 5151, 5152, 4612, 5153, 5154, 5155, + 5156, 5157, 5158, 5159, 5160, 5161, 5162, 5163, 5164, 5165, + 5166, 5167, 5168, 5169, 5170, 5171, 5172, 5173, 5174, 5175, + 1472, 598, 618, 820, 1205, 1309, 1412, 1858, 1307, 1692, + 5176, 5177, 5178, 5179, 5180, 5181, 5182, 1142, 1452, 1234, + 1172, 1875, 2043, 2149, 1793, 1382, 2973, 925, 2404, 1067, + 1241, 960, 1377, 2935, 1491, 919, 1217, 1865, 2030, 1406, + 1499, 2749, 4098, 5183, 5184, 5185, 5186, 5187, 5188, 2561, + 4099, 3117, 1804, 2049, 3689, 4309, 3513, 1663, 5189, 3166, + 3118, 3298, 1587, 1561, 3433, 5190, 3119, 1625, 2998, 3299, + 4613, 1766, 3690, 2786, 4614, 5191, 5192, 5193, 5194, 2161, + 26, 3377, 2, 3929, 20, 3691, 47, 4100, 50, 17, 16, 35, + 268, 27, 243, 42, 155, 24, 154, 29, 184, 4, 91, 14, 92, + 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, 12, 11, + 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151, 1069, 181, + 275, 1591, 83, 132, 1475, 126, 331, 829, 15, 69, 160, 59, + 22, 157, 55, 1079, 312, 109, 38, 23, 25, 10, 19, 79, 5195, + 61, 382, 1124, 8, 30, 5196, 5197, 5198, 5199, 5200, 5201, + 5202, 5203, 5204, 5205, 5206, 89, 62, 74, 34, 2416, 112, + 139, 196, 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, + 76, 874, 101, 258, 57, 80, 32, 364, 121, 1508, 169, 1547, + 68, 235, 145, 2999, 41, 360, 3027, 70, 63, 31, 43, 259, + 262, 1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, + 58, 565, 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, + 214, 655, 110, 261, 104, 1140, 54, 51, 36, 87, 67, 3070, + 185, 2618, 2936, 2020, 28, 1066, 2390, 2059, 5207, 5208, + 5209, 5210, 5211, 5212, 5213, 
5214, 5215, 5216, 4615, 5217, + 5218, 5219, 5220, 5221, 5222, 5223, 5224, 5225, 5226, 5227, + 5228, 5229, 5230, 5231, 5232, 5233, 5234, 5235, 5236, 3514, + 5237, 5238, 5239, 5240, 5241, 5242, 5243, 5244, 2297, 2031, + 4616, 4310, 3692, 5245, 3071, 5246, 3598, 5247, 4617, 3231, + 3515, 5248, 4101, 4311, 4618, 3808, 4312, 4102, 5249, 4103, + 4104, 3599, 5250, 5251, 5252, 5253, 5254, 5255, 5256, 5257, + 5258, 5259, 5260, 5261, 5262, 5263, 5264, 5265, 5266, 5267, + 5268, 5269, 5270, 5271, 5272, 5273, 5274, 5275, 5276, 5277, + 5278, 5279, 5280, 5281, 5282, 5283, 5284, 5285, 5286, 5287, + 5288, 5289, 5290, 5291, 5292, 5293, 5294, 5295, 5296, 5297, + 5298, 5299, 5300, 5301, 5302, 5303, 5304, 5305, 5306, 5307, + 5308, 5309, 5310, 5311, 5312, 5313, 5314, 5315, 5316, 5317, + 5318, 5319, 5320, 5321, 5322, 5323, 5324, 5325, 5326, 5327, + 5328, 5329, 5330, 5331, 5332, 5333, 5334, 5335, 5336, 5337, + 5338, 5339, 5340, 5341, 5342, 5343, 5344, 5345, 5346, 5347, + 5348, 5349, 5350, 5351, 5352, 5353, 5354, 5355, 5356, 5357, + 5358, 5359, 5360, 5361, 5362, 5363, 5364, 5365, 5366, 5367, + 5368, 5369, 5370, 5371, 5372, 5373, 5374, 5375, 5376, 5377, + 5378, 5379, 5380, 5381, 363, 642, 2787, 2878, 2788, 2789, + 2316, 3232, 2317, 3434, 2011, 165, 1942, 3930, 3931, 3932, + 3933, 5382, 4619, 5383, 4620, 5384, 5385, 5386, 5387, 5388, + 5389, 5390, 5391, 5392, 5393, 5394, 5395, 5396, 5397, 5398, + 5399, 5400, 5401, 5402, 5403, 5404, 5405, 5406, 5407, 5408, + 5409, 5410, 5411, 5412, 5413, 5414, 5415, 5416, 5417, 5418, + 5419, 5420, 5421, 5422, 5423, 5424, 5425, 5426, 5427, 5428, + 5429, 5430, 5431, 5432, 5433, 5434, 5435, 5436, 5437, 5438, + 5439, 5440, 5441, 5442, 5443, 5444, 5445, 5446, 5447, 5448, + 5449, 5450, 5451, 5452, 5453, 5454, 5455, 5456, 5457, 5458, + 5459, 5460, 5461, 5462, 5463, 5464, 5465, 5466, 5467, 5468, + 5469, 5470, 5471, 5472, 5473, 5474, 5475, 5476, 5477, 5478, + 5479, 5480, 5481, 5482, 5483, 5484, 5485, 5486, 5487, 5488, + 5489, 5490, 5491, 5492, 5493, 5494, 5495, 5496, 
5497, 5498, + 5499, 5500, 5501, 5502, 5503, 5504, 5505, 5506, 5507, 5508, + 5509, 5510, 5511, 5512, 5513, 5514, 5515, 5516, 5517, 5518, + 5519, 5520, 5521, 5522, 5523, 5524, 5525, 5526, 5527, 5528, + 5529, 5530, 5531, 5532, 5533, 5534, 5535, 5536, 5537, 5538, + 5539, 5540, 5541, 5542, 5543, 5544, 5545, 5546, 5547, 5548, + 5549, 5550, 5551, 5552, 5553, 5554, 5555, 5556, 5557, 5558, + 5559, 5560, 5561, 5562, 5563, 5564, 5565, 5566, 5567, 5568, + 5569, 5570, 5571, 5572, 5573, 5574, 5575, 5576, 5577, 5578, + 5579, 5580, 5581, 5582, 5583, 5584, 5585, 5586, 5587, 5588, + 5589, 5590, 5591, 5592, 5593, 5594, 5595, 5596, 5597, 5598, + 5599, 5600, 5601, 5602, 5603, 5604, 5605, 5606, 5607, 5608, + 5609, 5610, 5611, 5612, 5613, 5614, 5615, 5616, 5617, 5618, + 5619, 5620, 5621, 5622, 5623, 5624, 5625, 5626, 5627, 5628, + 5629, 5630, 5631, 5632, 5633, 5634, 5635, 5636, 5637, 5638, + 5639, 5640, 5641, 5642, 5643, 5644, 5645, 5646, 5647, 5648, + 5649, 5650, 5651, 5652, 5653, 5654, 5655, 5656, 5657, 5658, + 5659, 5660, 5661, 5662, 5663, 5664, 5665, 5666, 5667, 5668, + 5669, 5670, 5671, 5672, 5673, 5674, 5675, 5676, 5677, 5678, + 5679, 5680, 5681, 5682, 5683, 5684, 5685, 5686, 5687, 5688, + 5689, 5690, 5691, 5692, 5693, 5694, 5695, 5696, 5697, 5698, + 5699, 5700, 5701, 5702, 5703, 5704, 5705, 5706, 5707, 5708, + 5709, 5710, 5711, 5712, 5713, 5714, 5715, 5716, 5717, 5718, + 5719, 5720, 5721, 5722, 5723, 5724, 5725, 5726, 5727, 5728, + 5729, 5730, 5731, 5732, 5733, 5734, 5735, 5736, 5737, 5738, + 5739, 5740, 5741, 5742, 5743, 5744, 5745, 5746, 5747, 5748, + 5749, 5750, 5751, 5752, 5753, 5754, 5755, 5756, 5757, 5758, + 5759, 5760, 5761, 5762, 5763, 5764, 5765, 5766, 5767, 5768, + 5769, 5770, 5771, 5772, 5773, 5774, 5775, 5776, 5777, 5778, + 5779, 5780, 5781, 5782, 5783, 5784, 5785, 5786, 5787, 5788, + 5789, 5790, 5791, 5792, 5793, 5794, 5795, 5796, 5797, 5798, + 5799, 5800, 5801, 5802, 5803, 5804, 5805, 5806, 5807, 5808, + 5809, 5810, 5811, 5812, 5813, 5814, 5815, 5816, 5817, 5818, + 
5819, 5820, 5821, 5822, 5823, 5824, 5825, 5826, 5827, 5828, + 5829, 5830, 5831, 5832, 5833, 5834, 5835, 5836, 5837, 5838, + 5839, 5840, 5841, 5842, 5843, 5844, 5845, 5846, 5847, 5848, + 5849, 5850, 5851, 5852, 5853, 5854, 5855, 5856, 5857, 5858, + 5859, 5860, 5861, 5862, 5863, 5864, 5865, 5866, 5867, 5868, + 5869, 5870, 5871, 5872, 5873, 5874, 5875, 5876, 5877, 5878, + 5879, 5880, 5881, 5882, 5883, 5884, 5885, 5886, 5887, 5888, + 5889, 5890, 5891, 5892, 5893, 5894, 5895, 5896, 5897, 5898, + 5899, 5900, 5901, 5902, 5903, 5904, 5905, 5906, 5907, 5908, + 5909, 5910, 5911, 5912, 5913, 5914, 5915, 5916, 5917, 5918, + 5919, 5920, 5921, 5922, 5923, 5924, 5925, 5926, 5927, 5928, + 5929, 5930, 5931, 5932, 5933, 5934, 5935, 5936, 5937, 5938, + 5939, 5940, 5941, 5942, 5943, 5944, 5945, 5946, 5947, 5948, + 5949, 5950, 5951, 5952, 5953, 5954, 5955, 5956, 5957, 5958, + 5959, 5960, 5961, 5962, 5963, 5964, 5965, 5966, 5967, 5968, + 5969, 5970, 5971, 5972, 5973, 5974, 5975, 5976, 5977, 5978, + 5979, 5980, 5981, 5982, 5983, 5984, 5985, 5986, 5987, 5988, + 5989, 5990, 5991, 5992, 5993, 5994, 5995, 5996, 5997, 5998, + 5999, 6000, 6001, 6002, 6003, 6004, 6005, 6006, 6007, 6008, + 6009, 6010, 6011, 6012, 6013, 6014, 6015, 6016, 6017, 6018, + 6019, 6020, 6021, 6022, 6023, 6024, 6025, 6026, 6027, 6028, + 6029, 6030, 6031, 6032, 6033, 6034, 6035, 6036, 6037, 6038, + 6039, 6040, 6041, 6042, 6043, 6044, 6045, 6046, 6047, 6048, + 6049, 6050, 6051, 6052, 6053, 6054, 6055, 6056, 6057, 6058, + 6059, 6060, 6061, 6062, 6063, 6064, 6065, 6066, 6067, 6068, + 6069, 6070, 6071, 6072, 6073, 6074, 6075, 6076, 6077, 6078, + 6079, 6080, 6081, 6082, 6083, 6084, 6085, 6086, 6087, 6088, + 6089, 6090, 6091, 6092, 6093, 6094, 6095, 6096, 6097, 6098, + 6099, 6100, 6101, 6102, 6103, 6104, 6105, 6106, 6107, 6108, + 6109, 6110, 6111, 6112, 6113, 6114, 2044, 2060, 4621, 997, + 1235, 473, 1186, 4622, 920, 3378, 6115, 6116, 379, 1108, + 4313, 2657, 2735, 3934, 6117, 3809, 636, 3233, 573, 1026, + 3693, 3435, 2974, 
3300, 2298, 4105, 854, 2937, 2463, 393, + 2581, 2417, 539, 752, 1280, 2750, 2480, 140, 1161, 440, + 708, 1569, 665, 2497, 1746, 1291, 1523, 3000, 164, 1603, + 847, 1331, 537, 1997, 486, 508, 1693, 2418, 1970, 2227, + 878, 1220, 299, 1030, 969, 652, 2751, 624, 1137, 3301, + 2619, 65, 3302, 2045, 1761, 1859, 3120, 1930, 3694, 3516, + 663, 1767, 852, 835, 3695, 269, 767, 2826, 2339, 1305, + 896, 1150, 770, 1616, 6118, 506, 1502, 2075, 1012, 2519, + 775, 2520, 2975, 2340, 2938, 4314, 3028, 2086, 1224, 1943, + 2286, 6119, 3072, 4315, 2240, 1273, 1987, 3935, 1557, 175, + 597, 985, 3517, 2419, 2521, 1416, 3029, 585, 938, 1931, + 1007, 1052, 1932, 1685, 6120, 3379, 4316, 4623, 804, 599, + 3121, 1333, 2128, 2539, 1159, 1554, 2032, 3810, 687, 2033, + 2904, 952, 675, 1467, 3436, 6121, 2241, 1096, 1786, 2440, + 1543, 1924, 980, 1813, 2228, 781, 2692, 1879, 728, 1918, + 3696, 4624, 548, 1950, 4625, 1809, 1088, 1356, 3303, 2522, + 1944, 502, 972, 373, 513, 2827, 586, 2377, 2391, 1003, + 1976, 1631, 6122, 2464, 1084, 648, 1776, 4626, 2141, 324, + 962, 2012, 2177, 2076, 1384, 742, 2178, 1448, 1173, 1810, + 222, 102, 301, 445, 125, 2420, 662, 2498, 277, 200, 1476, + 1165, 1068, 224, 2562, 1378, 1446, 450, 1880, 659, 791, + 582, 4627, 2939, 3936, 1516, 1274, 555, 2099, 3697, 1020, + 1389, 1526, 3380, 1762, 1723, 1787, 2229, 412, 2114, 1900, + 2392, 3518, 512, 2597, 427, 1925, 2341, 3122, 1653, 1686, + 2465, 2499, 697, 330, 273, 380, 2162, 951, 832, 780, 991, + 1301, 3073, 965, 2270, 3519, 668, 2523, 2636, 1286, 535, + 1407, 518, 671, 957, 2658, 2378, 267, 611, 2197, 3030, + 6123, 248, 2299, 967, 1799, 2356, 850, 1418, 3437, 1876, + 1256, 1480, 2828, 1718, 6124, 6125, 1755, 1664, 2405, 6126, + 4628, 2879, 2829, 499, 2179, 676, 4629, 557, 2329, 2214, + 2090, 325, 3234, 464, 811, 3001, 992, 2342, 2481, 1232, + 1469, 303, 2242, 466, 1070, 2163, 603, 1777, 2091, 4630, + 2752, 4631, 2714, 322, 2659, 1964, 1768, 481, 2188, 1463, + 2330, 2857, 3600, 2092, 3031, 2421, 4632, 2318, 2070, 1849, 
+ 2598, 4633, 1302, 2254, 1668, 1701, 2422, 3811, 2905, 3032, + 3123, 2046, 4106, 1763, 1694, 4634, 1604, 943, 1724, 1454, + 917, 868, 2215, 1169, 2940, 552, 1145, 1800, 1228, 1823, + 1955, 316, 1080, 2510, 361, 1807, 2830, 4107, 2660, 3381, + 1346, 1423, 1134, 4108, 6127, 541, 1263, 1229, 1148, 2540, + 545, 465, 1833, 2880, 3438, 1901, 3074, 2482, 816, 3937, + 713, 1788, 2500, 122, 1575, 195, 1451, 2501, 1111, 6128, + 859, 374, 1225, 2243, 2483, 4317, 390, 1033, 3439, 3075, + 2524, 1687, 266, 793, 1440, 2599, 946, 779, 802, 507, 897, + 1081, 528, 2189, 1292, 711, 1866, 1725, 1167, 1640, 753, + 398, 2661, 1053, 246, 348, 4318, 137, 1024, 3440, 1600, + 2077, 2129, 825, 4319, 698, 238, 521, 187, 2300, 1157, + 2423, 1641, 1605, 1464, 1610, 1097, 2541, 1260, 1436, 759, + 2255, 1814, 2150, 705, 3235, 409, 2563, 3304, 561, 3033, + 2005, 2564, 726, 1956, 2343, 3698, 4109, 949, 3812, 3813, + 3520, 1669, 653, 1379, 2525, 881, 2198, 632, 2256, 1027, + 778, 1074, 733, 1957, 514, 1481, 2466, 554, 2180, 702, + 3938, 1606, 1017, 1398, 6129, 1380, 3521, 921, 993, 1313, + 594, 449, 1489, 1617, 1166, 768, 1426, 1360, 495, 1794, + 3601, 1177, 3602, 1170, 4320, 2344, 476, 425, 3167, 4635, + 3168, 1424, 401, 2662, 1171, 3382, 1998, 1089, 4110, 477, + 3169, 474, 6130, 1909, 596, 2831, 1842, 494, 693, 1051, + 1028, 1207, 3076, 606, 2115, 727, 2790, 1473, 1115, 743, + 3522, 630, 805, 1532, 4321, 2021, 366, 1057, 838, 684, + 1114, 2142, 4322, 2050, 1492, 1892, 1808, 2271, 3814, 2424, + 1971, 1447, 1373, 3305, 1090, 1536, 3939, 3523, 3306, 1455, + 2199, 336, 369, 2331, 1035, 584, 2393, 902, 718, 2600, + 6131, 2753, 463, 2151, 1149, 1611, 2467, 715, 1308, 3124, + 1268, 343, 1413, 3236, 1517, 1347, 2663, 2093, 3940, 2022, + 1131, 1553, 2100, 2941, 1427, 3441, 2942, 1323, 2484, 6132, + 1980, 872, 2368, 2441, 2943, 320, 2369, 2116, 1082, 679, + 1933, 3941, 2791, 3815, 625, 1143, 2023, 422, 2200, 3816, + 6133, 730, 1695, 356, 2257, 1626, 2301, 2858, 2637, 1627, + 1778, 937, 883, 2906, 2693, 
3002, 1769, 1086, 400, 1063, + 1325, 3307, 2792, 4111, 3077, 456, 2345, 1046, 747, 6134, + 1524, 884, 1094, 3383, 1474, 2164, 1059, 974, 1688, 2181, + 2258, 1047, 345, 1665, 1187, 358, 875, 3170, 305, 660, + 3524, 2190, 1334, 1135, 3171, 1540, 1649, 2542, 1527, 927, + 968, 2793, 885, 1972, 1850, 482, 500, 2638, 1218, 1109, + 1085, 2543, 1654, 2034, 876, 78, 2287, 1482, 1277, 861, + 1675, 1083, 1779, 724, 2754, 454, 397, 1132, 1612, 2332, + 893, 672, 1237, 257, 2259, 2370, 135, 3384, 337, 2244, + 547, 352, 340, 709, 2485, 1400, 788, 1138, 2511, 540, 772, + 1682, 2260, 2272, 2544, 2013, 1843, 1902, 4636, 1999, 1562, + 2288, 4637, 2201, 1403, 1533, 407, 576, 3308, 1254, 2071, + 978, 3385, 170, 136, 1201, 3125, 2664, 3172, 2394, 213, + 912, 873, 3603, 1713, 2202, 699, 3604, 3699, 813, 3442, + 493, 531, 1054, 468, 2907, 1483, 304, 281, 4112, 1726, + 1252, 2094, 339, 2319, 2130, 2639, 756, 1563, 2944, 748, + 571, 2976, 1588, 2425, 2715, 1851, 1460, 2426, 1528, 1392, + 1973, 3237, 288, 3309, 685, 3386, 296, 892, 2716, 2216, + 1570, 2245, 722, 1747, 2217, 905, 3238, 1103, 6135, 1893, + 1441, 1965, 251, 1805, 2371, 3700, 2601, 1919, 1078, 75, + 2182, 1509, 1592, 1270, 2640, 4638, 2152, 6136, 3310, 3817, + 524, 706, 1075, 292, 3818, 1756, 2602, 317, 98, 3173, 3605, + 3525, 1844, 2218, 3819, 2502, 814, 567, 385, 2908, 1534, + 6137, 534, 1642, 3239, 797, 6138, 1670, 1529, 953, 4323, + 188, 1071, 538, 178, 729, 3240, 2109, 1226, 1374, 2000, + 2357, 2977, 731, 2468, 1116, 2014, 2051, 6139, 1261, 1593, + 803, 2859, 2736, 3443, 556, 682, 823, 1541, 6140, 1369, + 2289, 1706, 2794, 845, 462, 2603, 2665, 1361, 387, 162, + 2358, 1740, 739, 1770, 1720, 1304, 1401, 3241, 1049, 627, + 1571, 2427, 3526, 1877, 3942, 1852, 1500, 431, 1910, 1503, + 677, 297, 2795, 286, 1433, 1038, 1198, 2290, 1133, 1596, + 4113, 4639, 2469, 1510, 1484, 3943, 6141, 2442, 108, 712, + 4640, 2372, 866, 3701, 2755, 3242, 1348, 834, 1945, 1408, + 3527, 2395, 3243, 1811, 824, 994, 1179, 2110, 1548, 1453, + 790, 
3003, 690, 4324, 4325, 2832, 2909, 3820, 1860, 3821, + 225, 1748, 310, 346, 1780, 2470, 821, 1993, 2717, 2796, + 828, 877, 3528, 2860, 2471, 1702, 2165, 2910, 2486, 1789, + 453, 359, 2291, 1676, 73, 1164, 1461, 1127, 3311, 421, + 604, 314, 1037, 589, 116, 2487, 737, 837, 1180, 111, 244, + 735, 6142, 2261, 1861, 1362, 986, 523, 418, 581, 2666, + 3822, 103, 855, 503, 1414, 1867, 2488, 1091, 657, 1597, + 979, 605, 1316, 4641, 1021, 2443, 2078, 2001, 1209, 96, + 587, 2166, 1032, 260, 1072, 2153, 173, 94, 226, 3244, 819, + 2006, 4642, 4114, 2203, 231, 1744, 782, 97, 2667, 786, + 3387, 887, 391, 442, 2219, 4326, 1425, 6143, 2694, 633, + 1544, 1202, 483, 2015, 592, 2052, 1958, 2472, 1655, 419, + 129, 4327, 3444, 3312, 1714, 1257, 3078, 4328, 1518, 1098, + 865, 1310, 1019, 1885, 1512, 1734, 469, 2444, 148, 773, + 436, 1815, 1868, 1128, 1055, 4329, 1245, 2756, 3445, 2154, + 1934, 1039, 4643, 579, 1238, 932, 2320, 353, 205, 801, + 115, 2428, 944, 2321, 1881, 399, 2565, 1211, 678, 766, + 3944, 335, 2101, 1459, 1781, 1402, 3945, 2737, 2131, 1010, + 844, 981, 1326, 1013, 550, 1816, 1545, 2620, 1335, 1008, + 371, 2881, 936, 1419, 1613, 3529, 1456, 1395, 2273, 1834, + 2604, 1317, 2738, 2503, 416, 1643, 4330, 806, 1126, 229, + 591, 3946, 1314, 1981, 1576, 1837, 1666, 347, 1790, 977, + 3313, 764, 2861, 1853, 688, 2429, 1920, 1462, 77, 595, + 415, 2002, 3034, 798, 1192, 4115, 6144, 2978, 4331, 3035, + 2695, 2582, 2072, 2566, 430, 2430, 1727, 842, 1396, 3947, + 3702, 613, 377, 278, 236, 1417, 3388, 3314, 3174, 757, + 1869, 107, 3530, 6145, 1194, 623, 2262, 207, 1253, 2167, + 3446, 3948, 492, 1117, 1935, 536, 1838, 2757, 1246, 4332, + 696, 2095, 2406, 1393, 1572, 3175, 1782, 583, 190, 253, + 1390, 2230, 830, 3126, 3389, 934, 3245, 1703, 1749, 2979, + 1870, 2545, 1656, 2204, 869, 2346, 4116, 3176, 1817, 496, + 1764, 4644, 942, 1504, 404, 1903, 1122, 1580, 3606, 2945, + 1022, 515, 372, 1735, 955, 2431, 3036, 6146, 2797, 1110, + 2302, 2798, 617, 6147, 441, 762, 1771, 3447, 3607, 3608, + 
1904, 840, 3037, 86, 939, 1385, 572, 1370, 2445, 1336, + 114, 3703, 898, 294, 203, 3315, 703, 1583, 2274, 429, 961, + 4333, 1854, 1951, 3390, 2373, 3704, 4334, 1318, 1381, 966, + 1911, 2322, 1006, 1155, 309, 989, 458, 2718, 1795, 1372, + 1203, 252, 1689, 1363, 3177, 517, 1936, 168, 1490, 562, + 193, 3823, 1042, 4117, 1835, 551, 470, 4645, 395, 489, + 3448, 1871, 1465, 2583, 2641, 417, 1493, 279, 1295, 511, + 1236, 1119, 72, 1231, 1982, 1812, 3004, 871, 1564, 984, + 3449, 1667, 2696, 2096, 4646, 2347, 2833, 1673, 3609, 695, + 3246, 2668, 807, 1183, 4647, 890, 388, 2333, 1801, 1457, + 2911, 1765, 1477, 1031, 3316, 3317, 1278, 3391, 2799, 2292, + 2526, 163, 3450, 4335, 2669, 1404, 1802, 6148, 2323, 2407, + 1584, 1728, 1494, 1824, 1269, 298, 909, 3318, 1034, 1632, + 375, 776, 1683, 2061, 291, 210, 1123, 809, 1249, 1002, + 2642, 3038, 206, 1011, 2132, 144, 975, 882, 1565, 342, + 667, 754, 1442, 2143, 1299, 2303, 2062, 447, 626, 2205, + 1221, 2739, 2912, 1144, 1214, 2206, 2584, 760, 1715, 614, + 950, 1281, 2670, 2621, 810, 577, 1287, 2546, 4648, 242, + 2168, 250, 2643, 691, 123, 2644, 647, 313, 1029, 689, 1357, + 2946, 1650, 216, 771, 1339, 1306, 808, 2063, 549, 913, + 1371, 2913, 2914, 6149, 1466, 1092, 1174, 1196, 1311, 2605, + 2396, 1783, 1796, 3079, 406, 2671, 2117, 3949, 4649, 487, + 1825, 2220, 6150, 2915, 448, 2348, 1073, 6151, 2397, 1707, + 130, 900, 1598, 329, 176, 1959, 2527, 1620, 6152, 2275, + 4336, 3319, 1983, 2191, 3705, 3610, 2155, 3706, 1912, 1513, + 1614, 6153, 1988, 646, 392, 2304, 1589, 3320, 3039, 1826, + 1239, 1352, 1340, 2916, 505, 2567, 1709, 1437, 2408, 2547, + 906, 6154, 2672, 384, 1458, 1594, 1100, 1329, 710, 423, + 3531, 2064, 2231, 2622, 1989, 2673, 1087, 1882, 333, 841, + 3005, 1296, 2882, 2379, 580, 1937, 1827, 1293, 2585, 601, + 574, 249, 1772, 4118, 2079, 1120, 645, 901, 1176, 1690, + 795, 2207, 478, 1434, 516, 1190, 1530, 761, 2080, 930, + 1264, 355, 435, 1552, 644, 1791, 987, 220, 1364, 1163, + 1121, 1538, 306, 2169, 1327, 1222, 546, 
2645, 218, 241, + 610, 1704, 3321, 1984, 1839, 1966, 2528, 451, 6155, 2586, + 3707, 2568, 907, 3178, 254, 2947, 186, 1845, 4650, 745, + 432, 1757, 428, 1633, 888, 2246, 2221, 2489, 3611, 2118, + 1258, 1265, 956, 3127, 1784, 4337, 2490, 319, 510, 119, + 457, 3612, 274, 2035, 2007, 4651, 1409, 3128, 970, 2758, + 590, 2800, 661, 2247, 4652, 2008, 3950, 1420, 1549, 3080, + 3322, 3951, 1651, 1375, 2111, 485, 2491, 1429, 1156, 6156, + 2548, 2183, 1495, 831, 1840, 2529, 2446, 501, 1657, 307, + 1894, 3247, 1341, 666, 899, 2156, 1539, 2549, 1559, 886, + 349, 2208, 3081, 2305, 1736, 3824, 2170, 2759, 1014, 1913, + 1386, 542, 1397, 2948, 490, 368, 716, 362, 159, 282, 2569, + 1129, 1658, 1288, 1750, 2674, 276, 649, 2016, 751, 1496, + 658, 1818, 1284, 1862, 2209, 2087, 2512, 3451, 622, 2834, + 376, 117, 1060, 2053, 1208, 1721, 1101, 1443, 247, 1250, + 3179, 1792, 3952, 2760, 2398, 3953, 6157, 2144, 3708, 446, + 2432, 1151, 2570, 3452, 2447, 2761, 2835, 1210, 2448, 3082, + 424, 2222, 1251, 2449, 2119, 2836, 504, 1581, 4338, 602, + 817, 857, 3825, 2349, 2306, 357, 3826, 1470, 1883, 2883, + 255, 958, 929, 2917, 3248, 302, 4653, 1050, 1271, 1751, + 2307, 1952, 1430, 2697, 2719, 2359, 354, 3180, 777, 158, + 2036, 4339, 1659, 4340, 4654, 2308, 2949, 2248, 1146, 2232, + 3532, 2720, 1696, 2623, 3827, 6158, 3129, 1550, 2698, 1485, + 1297, 1428, 637, 931, 2721, 2145, 914, 2550, 2587, 81, + 2450, 612, 827, 2646, 1242, 4655, 1118, 2884, 472, 1855, + 3181, 3533, 3534, 569, 1353, 2699, 1244, 1758, 2588, 4119, + 2009, 2762, 2171, 3709, 1312, 1531, 6159, 1152, 1938, 134, + 1830, 471, 3710, 2276, 1112, 1535, 3323, 3453, 3535, 982, + 1337, 2950, 488, 826, 674, 1058, 1628, 4120, 2017, 522, + 2399, 211, 568, 1367, 3454, 350, 293, 1872, 1139, 3249, + 1399, 1946, 3006, 1300, 2360, 3324, 588, 736, 6160, 2606, + 744, 669, 3536, 3828, 6161, 1358, 199, 723, 848, 933, 851, + 1939, 1505, 1514, 1338, 1618, 1831, 4656, 1634, 3613, 443, + 2740, 3829, 717, 1947, 491, 1914, 6162, 2551, 1542, 4121, + 1025, 
6163, 1099, 1223, 198, 3040, 2722, 370, 410, 1905, + 2589, 998, 1248, 3182, 2380, 519, 1449, 4122, 1710, 947, + 928, 1153, 4341, 2277, 344, 2624, 1511, 615, 105, 161, + 1212, 1076, 1960, 3130, 2054, 1926, 1175, 1906, 2473, 414, + 1873, 2801, 6164, 2309, 315, 1319, 3325, 318, 2018, 2146, + 2157, 963, 631, 223, 4342, 4343, 2675, 479, 3711, 1197, + 2625, 3712, 2676, 2361, 6165, 4344, 4123, 6166, 2451, 3183, + 1886, 2184, 1674, 1330, 1711, 1635, 1506, 799, 219, 3250, + 3083, 3954, 1677, 3713, 3326, 2081, 3614, 1652, 2073, 4657, + 1147, 3041, 1752, 643, 1961, 147, 1974, 3955, 6167, 1716, + 2037, 918, 3007, 1994, 120, 1537, 118, 609, 3184, 4345, + 740, 3455, 1219, 332, 1615, 3830, 6168, 1621, 2980, 1582, + 783, 212, 553, 2350, 3714, 1349, 2433, 2082, 4124, 889, + 6169, 2310, 1275, 1410, 973, 166, 1320, 3456, 1797, 1215, + 3185, 2885, 1846, 2590, 2763, 4658, 629, 822, 3008, 763, + 940, 1990, 2862, 439, 2409, 1566, 1240, 1622, 926, 1282, + 1907, 2764, 654, 2210, 1607, 327, 1130, 3956, 1678, 1623, + 6170, 2434, 2192, 686, 608, 3831, 3715, 903, 3957, 3042, + 6171, 2741, 1522, 1915, 1105, 1555, 2552, 1359, 323, 3251, + 4346, 3457, 738, 1354, 2553, 2311, 2334, 1828, 2003, 3832, + 1753, 2351, 1227, 6172, 1887, 4125, 1478, 6173, 2410, 1874, + 1712, 1847, 520, 1204, 2607, 264, 4659, 836, 2677, 2102, + 600, 4660, 3833, 2278, 3084, 6174, 4347, 3615, 1342, 640, + 532, 543, 2608, 1888, 2400, 2591, 1009, 4348, 1497, 341, + 1737, 3616, 2723, 1394, 529, 3252, 1321, 983, 4661, 1515, + 2120, 971, 2592, 924, 287, 1662, 3186, 4349, 2700, 4350, + 1519, 908, 1948, 2452, 156, 796, 1629, 1486, 2223, 2055, + 694, 4126, 1259, 1036, 3392, 1213, 2249, 2742, 1889, 1230, + 3958, 1015, 910, 408, 559, 3617, 4662, 746, 725, 935, 4663, + 3959, 3009, 1289, 563, 867, 4664, 3960, 1567, 2981, 2038, + 2626, 988, 2263, 2381, 4351, 143, 2374, 704, 1895, 6175, + 1188, 3716, 2088, 673, 3085, 2362, 4352, 484, 1608, 1921, + 2765, 2918, 215, 904, 3618, 3537, 894, 509, 976, 3043, + 2701, 3961, 4353, 2837, 2982, 498, 
6176, 6177, 1102, 3538, + 1332, 3393, 1487, 1636, 1637, 233, 245, 3962, 383, 650, + 995, 3044, 460, 1520, 1206, 2352, 749, 3327, 530, 700, + 389, 1438, 1560, 1773, 3963, 2264, 719, 2951, 2724, 3834, + 870, 1832, 1644, 1000, 839, 2474, 3717, 197, 1630, 3394, + 365, 2886, 3964, 1285, 2133, 734, 922, 818, 1106, 732, + 480, 2083, 1774, 3458, 923, 2279, 1350, 221, 3086, 85, + 2233, 2234, 3835, 1585, 3010, 2147, 1387, 1705, 2382, 1619, + 2475, 133, 239, 2802, 1991, 1016, 2084, 2383, 411, 2838, + 1113, 651, 1985, 1160, 3328, 990, 1863, 3087, 1048, 1276, + 2647, 265, 2627, 1599, 3253, 2056, 150, 638, 2019, 656, + 853, 326, 1479, 680, 1439, 4354, 1001, 1759, 413, 3459, + 3395, 2492, 1431, 459, 4355, 1125, 3329, 2265, 1953, 1450, + 2065, 2863, 849, 351, 2678, 3131, 3254, 3255, 1104, 1577, + 227, 1351, 1645, 2453, 2193, 1421, 2887, 812, 2121, 634, + 95, 2435, 201, 2312, 4665, 1646, 1671, 2743, 1601, 2554, + 2702, 2648, 2280, 1315, 1366, 2089, 3132, 1573, 3718, 3965, + 1729, 1189, 328, 2679, 1077, 1940, 1136, 558, 1283, 964, + 1195, 621, 2074, 1199, 1743, 3460, 3619, 1896, 1916, 1890, + 3836, 2952, 1154, 2112, 1064, 862, 378, 3011, 2066, 2113, + 2803, 1568, 2839, 6178, 3088, 2919, 1941, 1660, 2004, 1992, + 2194, 142, 707, 1590, 1708, 1624, 1922, 1023, 1836, 1233, + 1004, 2313, 789, 741, 3620, 6179, 1609, 2411, 1200, 4127, + 3719, 3720, 4666, 2057, 3721, 593, 2840, 367, 2920, 1878, + 6180, 3461, 1521, 628, 1168, 692, 2211, 2649, 300, 720, + 2067, 2571, 2953, 3396, 959, 2504, 3966, 3539, 3462, 1977, + 701, 6181, 954, 1043, 800, 681, 183, 3722, 1803, 1730, + 3540, 4128, 2103, 815, 2314, 174, 467, 230, 2454, 1093, + 2134, 755, 3541, 3397, 1141, 1162, 6182, 1738, 2039, 270, + 3256, 2513, 1005, 1647, 2185, 3837, 858, 1679, 1897, 1719, + 2954, 2324, 1806, 402, 670, 167, 4129, 1498, 2158, 2104, + 750, 6183, 915, 189, 1680, 1551, 455, 4356, 1501, 2455, + 405, 1095, 2955, 338, 1586, 1266, 1819, 570, 641, 1324, + 237, 1556, 2650, 1388, 3723, 6184, 1368, 2384, 1343, 1978, + 3089, 2436, 
879, 3724, 792, 1191, 758, 3012, 1411, 2135, + 1322, 4357, 240, 4667, 1848, 3725, 1574, 6185, 420, 3045, + 1546, 1391, 714, 4358, 1967, 941, 1864, 863, 664, 426, + 560, 1731, 2680, 1785, 2864, 1949, 2363, 403, 3330, 1415, + 1279, 2136, 1697, 2335, 204, 721, 2097, 3838, 90, 6186, + 2085, 2505, 191, 3967, 124, 2148, 1376, 1798, 1178, 1107, + 1898, 1405, 860, 4359, 1243, 1272, 2375, 2983, 1558, 2456, + 1638, 113, 3621, 578, 1923, 2609, 880, 386, 4130, 784, + 2186, 2266, 1422, 2956, 2172, 1722, 497, 263, 2514, 1267, + 2412, 2610, 177, 2703, 3542, 774, 1927, 1344, 616, 1432, + 1595, 1018, 172, 4360, 2325, 911, 4361, 438, 1468, 3622, + 794, 3968, 2024, 2173, 1681, 1829, 2957, 945, 895, 3090, + 575, 2212, 2476, 475, 2401, 2681, 785, 2744, 1745, 2293, + 2555, 1975, 3133, 2865, 394, 4668, 3839, 635, 4131, 639, + 202, 1507, 2195, 2766, 1345, 1435, 2572, 3726, 1908, 1184, + 1181, 2457, 3727, 3134, 4362, 843, 2611, 437, 916, 4669, + 234, 769, 1884, 3046, 3047, 3623, 833, 6187, 1639, 2250, + 2402, 1355, 1185, 2010, 2047, 999, 525, 1732, 1290, 1488, + 2612, 948, 1578, 3728, 2413, 2477, 1216, 2725, 2159, 334, + 3840, 1328, 3624, 2921, 1525, 4132, 564, 1056, 891, 4363, + 1444, 1698, 2385, 2251, 3729, 1365, 2281, 2235, 1717, 6188, + 864, 3841, 2515, 444, 527, 2767, 2922, 3625, 544, 461, + 6189, 566, 209, 2437, 3398, 2098, 1065, 2068, 3331, 3626, + 3257, 2137) diff --git a/APPS_UNCOMPILED/lib/chardet/jpcntx.py b/APPS_UNCOMPILED/lib/chardet/jpcntx.py new file mode 100644 index 0000000..73e3e5c --- /dev/null +++ b/APPS_UNCOMPILED/lib/chardet/jpcntx.py @@ -0,0 +1,180 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/jpcntx.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 19643 bytes +jp2CharContext = ((0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1), + (2, 4, 0, 4, 0, 3, 0, 4, 0, 3, 4, 4, 4, 2, 4, 3, 3, 4, 3, 2, 3, 3, 4, 2, 3, 3, 3, 2, 4, 1, 4, 3, 3, 1, 5, 4, 3, 4, 3, 4, 3, 5, 3, 0, 3, 5, 4, 2, 0, 3, 1, 0, 3, 3, 0, 3, 3, 0, 1, 1, 0, 4, 3, 0, 3, 3, 0, 4, 0, 2, 0, 3, 5, 5, 5, 5, 4, 0, 4, 1, 0, 3, 4), + (0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2), + (0, 4, 0, 5, 0, 5, 0, 4, 0, 4, 5, 4, 4, 3, 5, 3, 5, 1, 5, 3, 4, 3, 4, 4, 3, 4, 3, 3, 4, 3, 5, 4, 4, 3, 5, 5, 3, 5, 5, 5, 3, 5, 5, 3, 4, 5, 5, 3, 1, 3, 2, 0, 3, 4, 0, 4, 2, 0, 4, 2, 1, 5, 3, 2, 3, 5, 0, 4, 0, 2, 0, 5, 4, 4, 5, 4, 5, 0, 4, 0, 0, 4, 4), + (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), + (0, 3, 0, 4, 0, 3, 0, 3, 0, 4, 5, 4, 3, 3, 3, 3, 4, 3, 5, 4, 4, 3, 5, 4, 4, 3, 4, 3, 4, 4, 4, 4, 5, 3, 4, 4, 3, 4, 5, 5, 4, 5, 5, 1, 4, 5, 4, 3, 0, 3, 3, 1, 3, 3, 0, 4, 4, 0, 3, 3, 1, 5, 3, 3, 3, 5, 0, 4, 0, 3, 0, 4, 4, 3, 4, 3, 3, 0, 4, 1, 1, 3, 4), + (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), + (0, 4, 0, 3, 0, 3, 0, 4, 0, 3, 4, 4, 3, 2, 2, 1, 2, 1, 3, 1, 3, 3, 3, 3, 3, 4, 3, 1, 3, 3, 5, 3, 3, 0, 4, 3, 0, 5, 4, 3, 3, 5, 4, 4, 3, 4, 4, 5, 0, 1, 2, 0, 1, 2, 0, 2, 2, 0, 1, 0, 0, 5, 2, 2, 1, 4, 0, 3, 0, 1, 0, 4, 4, 3, 5, 4, 3, 0, 2, 1, 0, 4, 3), + (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), + (0, 3, 0, 5, 0, 4, 0, 2, 1, 4, 4, 2, 4, 1, 4, 2, 4, 2, 4, 3, 3, 3, 4, 3, 3, 3, 3, 1, 4, 2, 3, 3, 3, 1, 4, 4, 1, 1, 1, 4, 3, 3, 2, 0, 2, 4, 3, 2, 0, 3, 3, 0, 3, 1, 1, 0, 0, 0, 3, 3, 0, 4, 2, 2, 3, 4, 0, 4, 0, 3, 0, 4, 4, 5, 3, 4, 4, 0, 3, 0, 0, 1, 4), + (1, 4, 0, 4, 0, 4, 0, 4, 0, 3, 5, 4, 4, 3, 4, 3, 5, 4, 3, 3, 4, 3, 5, 4, 4, 4, 4, 3, 4, 2, 4, 3, 3, 1, 5, 4, 3, 2, 4, 5, 4, 5, 5, 4, 4, 5, 4, 4, 0, 3, 2, 2, 3, 3, 0, 4, 3, 1, 3, 2, 1, 4, 3, 3, 4, 5, 0, 3, 0, 2, 0, 4, 5, 5, 4, 5, 4, 0, 4, 0, 0, 5, 4), + (0, 5, 0, 5, 0, 4, 0, 3, 0, 4, 4, 3, 4, 3, 3, 3, 4, 0, 4, 4, 4, 3, 4, 3, 4, 3, 3, 1, 4, 2, 4, 3, 4, 0, 5, 4, 1, 4, 5, 4, 4, 5, 3, 2, 4, 3, 4, 3, 2, 4, 1, 3, 3, 3, 2, 3, 2, 0, 4, 3, 3, 4, 3, 3, 3, 4, 0, 4, 0, 3, 0, 4, 5, 4, 4, 4, 3, 0, 4, 1, 0, 1, 3), + (0, 3, 1, 4, 0, 3, 0, 2, 0, 3, 4, 4, 3, 1, 4, 2, 3, 3, 4, 3, 4, 3, 4, 3, 4, 4, 3, 2, 3, 1, 5, 4, 4, 1, 4, 4, 3, 5, 4, 4, 3, 5, 5, 4, 3, 4, 4, 3, 1, 2, 3, 1, 2, 2, 0, 3, 2, 0, 3, 1, 0, 5, 3, 3, 3, 4, 3, 3, 3, 3, 4, 4, 4, 4, 5, 4, 2, 0, 3, 3, 2, 4, 3), + (0, 2, 0, 3, 0, 1, 0, 1, 0, 0, 3, 2, 0, 0, 2, 0, 1, 0, 2, 1, 3, 3, 3, 1, 2, 3, 1, 0, 1, 0, 4, 2, 1, 1, 3, 3, 0, 4, 3, 3, 1, 4, 3, 3, 0, 3, 3, 2, 0, 0, 0, 0, 1, 0, 0, 2, 0, 0, 0, 0, 0, 4, 1, 0, 2, 3, 2, 2, 2, 1, 3, 3, 3, 4, 4, 3, 2, 0, 3, 1, 0, 3, 3), + (0, 4, 0, 4, 0, 3, 0, 3, 0, 4, 4, 4, 3, 3, 3, 3, 3, 3, 4, 3, 4, 2, 4, 3, 4, 3, 3, 2, 4, 3, 4, 5, 4, 1, 4, 5, 3, 5, 4, 5, 3, 5, 4, 0, 3, 5, 5, 3, 1, 3, 3, 2, 2, 3, 0, 3, 4, 1, 3, 3, 2, 4, 3, 3, 3, 4, 0, 4, 0, 3, 0, 4, 5, 4, 4, 5, 3, 0, 4, 1, 0, 3, 4), + (0, 2, 0, 3, 0, 3, 0, 0, 0, 2, 2, 2, 1, 0, 1, 0, 0, 0, 3, 0, 3, 0, 3, 0, 1, 3, 1, 0, 3, 1, 3, 3, 3, 1, 3, 3, 3, 0, 1, 3, 1, 3, 4, 0, 0, 3, 1, 1, 0, 3, 2, 0, 0, 0, 0, 1, 3, 0, 1, 0, 0, 3, 3, 2, 0, 3, 0, 0, 0, 0, 0, 3, 4, 3, 4, 3, 3, 0, 3, 0, 0, 2, 3), + (2, 3, 0, 3, 0, 2, 0, 
1, 0, 3, 3, 4, 3, 1, 3, 1, 1, 1, 3, 1, 4, 3, 4, 3, 3, 3, 0, 0, 3, 1, 5, 4, 3, 1, 4, 3, 2, 5, 5, 4, 4, 4, 4, 3, 3, 4, 4, 4, 0, 2, 1, 1, 3, 2, 0, 1, 2, 0, 0, 1, 0, 4, 1, 3, 3, 3, 0, 3, 0, 1, 0, 4, 4, 4, 5, 5, 3, 0, 2, 0, 0, 4, 4), + (0, 2, 0, 1, 0, 3, 1, 3, 0, 2, 3, 3, 3, 0, 3, 1, 0, 0, 3, 0, 3, 2, 3, 1, 3, 2, 1, 1, 0, 0, 4, 2, 1, 0, 2, 3, 1, 4, 3, 2, 0, 4, 4, 3, 1, 3, 1, 3, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 4, 1, 1, 1, 2, 0, 3, 0, 0, 0, 3, 4, 2, 4, 3, 2, 0, 1, 0, 0, 3, 3), + (0, 1, 0, 4, 0, 5, 0, 4, 0, 2, 4, 4, 2, 3, 3, 2, 3, 3, 5, 3, 3, 3, 4, 3, 4, 2, 3, 0, 4, 3, 3, 3, 4, 1, 4, 3, 2, 1, 5, 5, 3, 4, 5, 1, 3, 5, 4, 2, 0, 3, 3, 0, 1, 3, 0, 4, 2, 0, 1, 3, 1, 4, 3, 3, 3, 3, 0, 3, 0, 1, 0, 3, 4, 4, 4, 5, 5, 0, 3, 0, 1, 4, 5), + (0, 2, 0, 3, 0, 3, 0, 0, 0, 2, 3, 1, 3, 0, 4, 0, 1, 1, 3, 0, 3, 4, 3, 2, 3, 1, 0, 3, 3, 2, 3, 1, 3, 0, 2, 3, 0, 2, 1, 4, 1, 2, 2, 0, 0, 3, 3, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0, 0, 2, 2, 0, 3, 2, 1, 3, 3, 0, 2, 0, 2, 0, 0, 3, 3, 1, 2, 4, 0, 3, 0, 2, 2, 3), + (2, 4, 0, 5, 0, 4, 0, 4, 0, 2, 4, 4, 4, 3, 4, 3, 3, 3, 1, 2, 4, 3, 4, 3, 4, 4, 5, 0, 3, 3, 3, 3, 2, 0, 4, 3, 1, 4, 3, 4, 1, 4, 4, 3, 3, 4, 4, 3, 1, 2, 3, 0, 4, 2, 0, 4, 1, 0, 3, 3, 0, 4, 3, 3, 3, 4, 0, 4, 0, 2, 0, 3, 5, 3, 4, 5, 2, 0, 3, 0, 0, 4, 5), + (0, 3, 0, 4, 0, 1, 0, 1, 0, 1, 3, 2, 2, 1, 3, 0, 3, 0, 2, 0, 2, 0, 3, 0, 2, 0, 0, 0, 1, 0, 1, 1, 0, 0, 3, 1, 0, 0, 0, 4, 0, 3, 1, 0, 2, 1, 3, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 4, 2, 2, 3, 1, 0, 3, 0, 0, 0, 1, 4, 4, 4, 3, 0, 0, 4, 0, 0, 1, 4), + (1, 4, 1, 5, 0, 3, 0, 3, 0, 4, 5, 4, 4, 3, 5, 3, 3, 4, 4, 3, 4, 1, 3, 3, 3, 3, 2, 1, 4, 1, 5, 4, 3, 1, 4, 4, 3, 5, 4, 4, 3, 5, 4, 3, 3, 4, 4, 4, 0, 3, 3, 1, 2, 3, 0, 3, 1, 0, 3, 3, 0, 5, 4, 4, 4, 4, 4, 4, 3, 3, 5, 4, 4, 3, 3, 5, 4, 0, 3, 2, 0, 4, 4), + (0, 2, 0, 3, 0, 1, 0, 0, 0, 1, 3, 3, 3, 2, 4, 1, 3, 0, 3, 1, 3, 0, 2, 2, 1, 1, 0, 0, 2, 0, 4, 3, 1, 0, 4, 3, 0, 4, 4, 4, 1, 4, 3, 1, 1, 3, 3, 1, 0, 2, 0, 0, 1, 3, 0, 0, 0, 0, 2, 0, 0, 4, 3, 2, 4, 3, 5, 4, 3, 3, 3, 4, 3, 3, 4, 3, 3, 0, 2, 1, 0, 3, 3), 
+ (0, 2, 0, 4, 0, 3, 0, 2, 0, 2, 5, 5, 3, 4, 4, 4, 4, 1, 4, 3, 3, 0, 4, 3, 4, 3, 1, 3, 3, 2, 4, 3, 0, 3, 4, 3, 0, 3, 4, 4, 2, 4, 4, 0, 4, 5, 3, 3, 2, 2, 1, 1, 1, 2, 0, 1, 5, 0, 3, 3, 2, 4, 3, 3, 3, 4, 0, 3, 0, 2, 0, 4, 4, 3, 5, 5, 0, 0, 3, 0, 2, 3, 3), + (0, 3, 0, 4, 0, 3, 0, 1, 0, 3, 4, 3, 3, 1, 3, 3, 3, 0, 3, 1, 3, 0, 4, 3, 3, 1, 1, 0, 3, 0, 3, 3, 0, 0, 4, 4, 0, 1, 5, 4, 3, 3, 5, 0, 3, 3, 4, 3, 0, 2, 0, 1, 1, 1, 0, 1, 3, 0, 1, 2, 1, 3, 3, 2, 3, 3, 0, 3, 0, 1, 0, 1, 3, 3, 4, 4, 1, 0, 1, 2, 2, 1, 3), + (0, 1, 0, 4, 0, 4, 0, 3, 0, 1, 3, 3, 3, 2, 3, 1, 1, 0, 3, 0, 3, 3, 4, 3, 2, 4, 2, 0, 1, 0, 4, 3, 2, 0, 4, 3, 0, 5, 3, 3, 2, 4, 4, 4, 3, 3, 3, 4, 0, 1, 3, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 4, 2, 3, 3, 3, 0, 3, 0, 0, 0, 4, 4, 4, 5, 3, 2, 0, 3, 3, 0, 3, 5), + (0, 2, 0, 3, 0, 0, 0, 3, 0, 1, 3, 0, 2, 0, 0, 0, 1, 0, 3, 1, 1, 3, 3, 0, 0, 3, 0, 0, 3, 0, 2, 3, 1, 0, 3, 1, 0, 3, 3, 2, 0, 4, 2, 2, 0, 2, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 1, 2, 0, 1, 0, 1, 0, 0, 0, 1, 3, 1, 2, 0, 0, 0, 1, 0, 0, 1, 4), + (0, 3, 0, 3, 0, 5, 0, 1, 0, 2, 4, 3, 1, 3, 3, 2, 1, 1, 5, 2, 1, 0, 5, 1, 2, 0, 0, 0, 3, 3, 2, 2, 3, 2, 4, 3, 0, 0, 3, 3, 1, 3, 3, 0, 2, 5, 3, 4, 0, 3, 3, 0, 1, 2, 0, 2, 2, 0, 3, 2, 0, 2, 2, 3, 3, 3, 0, 2, 0, 1, 0, 3, 4, 4, 2, 5, 4, 0, 3, 0, 0, 3, 5), + (0, 3, 0, 3, 0, 3, 0, 1, 0, 3, 3, 3, 3, 0, 3, 0, 2, 0, 2, 1, 1, 0, 2, 0, 1, 0, 0, 0, 2, 1, 0, 0, 1, 0, 3, 2, 0, 0, 3, 3, 1, 2, 3, 1, 0, 3, 3, 0, 0, 1, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 2, 3, 1, 2, 3, 0, 3, 0, 1, 0, 3, 2, 1, 0, 4, 3, 0, 1, 1, 0, 3, 3), + (0, 4, 0, 5, 0, 3, 0, 3, 0, 4, 5, 5, 4, 3, 5, 3, 4, 3, 5, 3, 3, 2, 5, 3, 4, 4, 4, 3, 4, 3, 4, 5, 5, 3, 4, 4, 3, 4, 4, 5, 4, 4, 4, 3, 4, 5, 5, 4, 2, 3, 4, 2, 3, 4, 0, 3, 3, 1, 4, 3, 2, 4, 3, 3, 5, 5, 0, 3, 0, 3, 0, 5, 5, 5, 5, 4, 4, 0, 4, 0, 1, 4, 4), + (0, 4, 0, 4, 0, 3, 0, 3, 0, 3, 5, 4, 4, 2, 3, 2, 5, 1, 3, 2, 5, 1, 4, 2, 3, 2, 3, 3, 4, 3, 3, 3, 3, 2, 5, 4, 1, 3, 3, 5, 3, 4, 4, 0, 4, 4, 3, 1, 1, 3, 1, 0, 2, 3, 0, 2, 3, 0, 3, 0, 0, 4, 3, 1, 3, 4, 0, 3, 0, 2, 0, 4, 4, 4, 3, 
4, 5, 0, 4, 0, 0, 3, 4), + (0, 3, 0, 3, 0, 3, 1, 2, 0, 3, 4, 4, 3, 3, 3, 0, 2, 2, 4, 3, 3, 1, 3, 3, 3, 1, 1, 0, 3, 1, 4, 3, 2, 3, 4, 4, 2, 4, 4, 4, 3, 4, 4, 3, 2, 4, 4, 3, 1, 3, 3, 1, 3, 3, 0, 4, 1, 0, 2, 2, 1, 4, 3, 2, 3, 3, 5, 4, 3, 3, 5, 4, 4, 3, 3, 0, 4, 0, 3, 2, 2, 4, 4), + (0, 2, 0, 1, 0, 0, 0, 0, 0, 1, 2, 1, 3, 0, 0, 0, 0, 0, 2, 0, 1, 2, 1, 0, 0, 1, 0, 0, 0, 0, 3, 0, 0, 1, 0, 1, 1, 3, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 0, 3, 4, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1), + (0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 4, 0, 4, 1, 4, 0, 3, 0, 4, 0, 3, 0, 4, 0, 3, 0, 3, 0, 4, 1, 5, 1, 4, 0, 0, 3, 0, 5, 0, 5, 2, 0, 1, 0, 0, 0, 2, 1, 4, 0, 1, 3, 0, 0, 3, 0, 0, 3, 1, 1, 4, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0), + (1, 4, 0, 5, 0, 3, 0, 2, 0, 3, 5, 4, 4, 3, 4, 3, 5, 3, 4, 3, 3, 0, 4, 3, 3, 3, 3, 3, 3, 2, 4, 4, 3, 1, 3, 4, 4, 5, 4, 4, 3, 4, 4, 1, 3, 5, 4, 3, 3, 3, 1, 2, 2, 3, 3, 1, 3, 1, 3, 3, 3, 5, 3, 3, 4, 5, 0, 3, 0, 3, 0, 3, 4, 3, 4, 4, 3, 0, 3, 0, 2, 4, 3), + (0, 1, 0, 4, 0, 0, 0, 0, 0, 1, 4, 0, 4, 1, 4, 2, 4, 0, 3, 0, 1, 0, 1, 0, 0, 0, 0, 0, 2, 0, 3, 1, 1, 1, 0, 3, 0, 0, 0, 1, 2, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 3, 0, 0, 0, 0, 3, 2, 0, 2, 2, 0, 1, 0, 0, 0, 2, 3, 2, 3, 3, 0, 0, 0, 0, 2, 1, 0), + (0, 5, 1, 5, 0, 3, 0, 3, 0, 5, 4, 4, 5, 1, 5, 3, 3, 0, 4, 3, 4, 3, 5, 3, 4, 3, 3, 2, 4, 3, 4, 3, 3, 0, 3, 3, 1, 4, 4, 3, 4, 4, 4, 3, 4, 5, 5, 3, 2, 3, 1, 1, 3, 3, 1, 3, 1, 1, 3, 3, 2, 4, 5, 3, 3, 5, 0, 4, 0, 3, 0, 4, 4, 3, 5, 3, 3, 0, 3, 4, 0, 4, 3), + (0, 5, 0, 5, 0, 3, 0, 2, 0, 4, 4, 3, 5, 2, 4, 3, 3, 3, 4, 4, 4, 3, 5, 3, 5, 3, 3, 1, 4, 0, 4, 3, 3, 0, 3, 3, 0, 4, 4, 4, 4, 5, 4, 3, 3, 5, 5, 3, 2, 3, 1, 2, 3, 2, 0, 1, 0, 0, 3, 2, 2, 4, 4, 3, 1, 5, 0, 4, 0, 3, 0, 4, 3, 1, 3, 2, 1, 0, 3, 3, 0, 3, 3), + (0, 4, 0, 5, 0, 5, 0, 4, 0, 4, 5, 5, 5, 3, 4, 3, 3, 2, 5, 4, 4, 3, 5, 3, 5, 3, 4, 0, 4, 3, 4, 4, 3, 2, 4, 4, 3, 4, 5, 4, 4, 5, 5, 0, 3, 5, 5, 4, 1, 3, 3, 2, 3, 3, 1, 3, 1, 0, 4, 3, 1, 4, 4, 3, 4, 5, 0, 
4, 0, 2, 0, 4, 3, 4, 4, 3, 3, 0, 4, 0, 0, 5, 5), + (0, 4, 0, 4, 0, 5, 0, 1, 1, 3, 3, 4, 4, 3, 4, 1, 3, 0, 5, 1, 3, 0, 3, 1, 3, 1, 1, 0, 3, 0, 3, 3, 4, 0, 4, 3, 0, 4, 4, 4, 3, 4, 4, 0, 3, 5, 4, 1, 0, 3, 0, 0, 2, 3, 0, 3, 1, 0, 3, 1, 0, 3, 2, 1, 3, 5, 0, 3, 0, 1, 0, 3, 2, 3, 3, 4, 4, 0, 2, 2, 0, 4, 4), + (2, 4, 0, 5, 0, 4, 0, 3, 0, 4, 5, 5, 4, 3, 5, 3, 5, 3, 5, 3, 5, 2, 5, 3, 4, 3, 3, 4, 3, 4, 5, 3, 2, 1, 5, 4, 3, 2, 3, 4, 5, 3, 4, 1, 2, 5, 4, 3, 0, 3, 3, 0, 3, 2, 0, 2, 3, 0, 4, 1, 0, 3, 4, 3, 3, 5, 0, 3, 0, 1, 0, 4, 5, 5, 5, 4, 3, 0, 4, 2, 0, 3, 5), + (0, 5, 0, 4, 0, 4, 0, 2, 0, 5, 4, 3, 4, 3, 4, 3, 3, 3, 4, 3, 4, 2, 5, 3, 5, 3, 4, 1, 4, 3, 4, 4, 4, 0, 3, 5, 0, 4, 4, 4, 4, 5, 3, 1, 3, 4, 5, 3, 3, 3, 3, 3, 3, 3, 0, 2, 2, 0, 3, 3, 2, 4, 3, 3, 3, 5, 3, 4, 1, 3, 3, 5, 3, 2, 0, 0, 0, 0, 4, 3, 1, 3, 3), + (0, 1, 0, 3, 0, 3, 0, 1, 0, 1, 3, 3, 3, 2, 3, 3, 3, 0, 3, 0, 0, 0, 3, 1, 3, 0, 0, 0, 2, 2, 2, 3, 0, 0, 3, 2, 0, 1, 2, 4, 1, 3, 3, 0, 0, 3, 3, 3, 0, 1, 0, 0, 2, 1, 0, 0, 3, 0, 3, 1, 0, 3, 0, 0, 1, 3, 0, 2, 0, 1, 0, 3, 3, 1, 3, 3, 0, 0, 1, 1, 0, 3, 3), + (0, 2, 0, 3, 0, 2, 1, 4, 0, 2, 2, 3, 1, 1, 3, 1, 1, 0, 2, 0, 3, 1, 2, 3, 1, 3, 0, 0, 1, 0, 4, 3, 2, 3, 3, 3, 1, 4, 2, 3, 3, 3, 3, 1, 0, 3, 1, 4, 0, 1, 1, 0, 1, 2, 0, 1, 1, 0, 1, 1, 0, 3, 1, 3, 2, 2, 0, 1, 0, 0, 0, 2, 3, 3, 3, 1, 0, 0, 0, 0, 0, 2, 3), + (0, 5, 0, 4, 0, 5, 0, 2, 0, 4, 5, 5, 3, 3, 4, 3, 3, 1, 5, 4, 4, 2, 4, 4, 4, 3, 4, 2, 4, 3, 5, 5, 4, 3, 3, 4, 3, 3, 5, 5, 4, 5, 5, 1, 3, 4, 5, 3, 1, 4, 3, 1, 3, 3, 0, 3, 3, 1, 4, 3, 1, 4, 5, 3, 3, 5, 0, 4, 0, 3, 0, 5, 3, 3, 1, 4, 3, 0, 4, 0, 1, 5, 3), + (0, 5, 0, 5, 0, 4, 0, 2, 0, 4, 4, 3, 4, 3, 3, 3, 3, 3, 5, 4, 4, 4, 4, 4, 4, 5, 3, 3, 5, 2, 4, 4, 4, 3, 4, 4, 3, 3, 4, 4, 5, 5, 3, 3, 4, 3, 4, 3, 3, 4, 3, 3, 3, 3, 1, 2, 2, 1, 4, 3, 3, 5, 4, 4, 3, 4, 0, 4, 0, 3, 0, 4, 4, 4, 4, 4, 1, 0, 4, 2, 0, 2, 4), + (0, 4, 0, 4, 0, 3, 0, 1, 0, 3, 5, 2, 3, 0, 3, 0, 2, 1, 4, 2, 3, 3, 4, 1, 4, 3, 3, 2, 4, 1, 3, 3, 3, 0, 3, 3, 0, 0, 3, 3, 3, 5, 3, 3, 3, 3, 3, 2, 0, 2, 0, 0, 2, 0, 0, 2, 0, 0, 1, 
0, 0, 3, 1, 2, 2, 3, 0, 3, 0, 2, 0, 4, 4, 3, 3, 4, 1, 0, 3, 0, 0, 2, 4), + (0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 2, 0, 0, 0, 0, 0, 1, 0, 2, 0, 1, 0, 0, 0, 0, 0, 3, 1, 3, 0, 3, 2, 0, 0, 0, 1, 0, 3, 2, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 4, 0, 2, 0, 0, 0, 0, 0, 0, 2), + (0, 2, 1, 3, 0, 2, 0, 2, 0, 3, 3, 3, 3, 1, 3, 1, 3, 3, 3, 3, 3, 3, 4, 2, 2, 1, 2, 1, 4, 0, 4, 3, 1, 3, 3, 3, 2, 4, 3, 5, 4, 3, 3, 3, 3, 3, 3, 3, 0, 1, 3, 0, 2, 0, 0, 1, 0, 0, 1, 0, 0, 4, 2, 0, 2, 3, 0, 3, 3, 0, 3, 3, 4, 2, 3, 1, 4, 0, 1, 2, 0, 2, 3), + (0, 3, 0, 3, 0, 1, 0, 3, 0, 2, 3, 3, 3, 0, 3, 1, 2, 0, 3, 3, 2, 3, 3, 2, 3, 2, 3, 1, 3, 0, 4, 3, 2, 0, 3, 3, 1, 4, 3, 3, 2, 3, 4, 3, 1, 3, 3, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 4, 1, 1, 0, 3, 0, 3, 1, 0, 2, 3, 3, 3, 3, 3, 1, 0, 0, 2, 0, 3, 3), + (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 2, 0, 3, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 3, 0, 3, 0, 3, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 2, 0, 2, 3, 0, 0, 0, 0, 0, 0, 0, 0, 3), + (0, 2, 0, 3, 1, 3, 0, 3, 0, 2, 3, 3, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 1, 3, 0, 2, 3, 1, 1, 4, 3, 3, 2, 3, 3, 1, 2, 2, 4, 1, 3, 3, 0, 1, 4, 2, 3, 0, 1, 3, 0, 3, 0, 0, 1, 3, 0, 2, 0, 0, 3, 3, 2, 1, 3, 0, 3, 0, 2, 0, 3, 4, 4, 4, 3, 1, 0, 3, 0, 0, 3, 3), + (0, 2, 0, 1, 0, 2, 0, 0, 0, 1, 3, 2, 2, 1, 3, 0, 1, 1, 3, 0, 3, 2, 3, 1, 2, 0, 2, 0, 1, 1, 3, 3, 3, 0, 3, 3, 1, 1, 2, 3, 2, 3, 3, 1, 2, 3, 2, 0, 0, 1, 0, 0, 0, 0, 0, 0, 3, 0, 1, 0, 0, 2, 1, 2, 1, 3, 0, 3, 0, 0, 0, 3, 4, 4, 4, 3, 2, 0, 2, 0, 0, 2, 4), + (0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 3, 1, 0, 0, 0, 0, 0, 0, 0, 3), + (0, 3, 0, 3, 0, 2, 0, 3, 0, 3, 3, 3, 2, 3, 2, 2, 2, 0, 3, 1, 3, 3, 3, 2, 3, 3, 0, 0, 3, 0, 3, 2, 2, 0, 2, 3, 1, 4, 3, 4, 3, 3, 2, 3, 1, 5, 4, 4, 0, 3, 1, 
2, 1, 3, 0, 3, 1, 1, 2, 0, 2, 3, 1, 3, 1, 3, 0, 3, 0, 1, 0, 3, 3, 4, 4, 2, 1, 0, 2, 1, 0, 2, 4), + (0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 4, 2, 5, 1, 4, 0, 2, 0, 2, 1, 3, 1, 4, 0, 2, 1, 0, 0, 2, 1, 4, 1, 1, 0, 3, 3, 0, 5, 1, 3, 2, 3, 3, 1, 0, 3, 2, 3, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 4, 0, 1, 0, 3, 0, 2, 0, 1, 0, 3, 3, 3, 4, 3, 3, 0, 0, 0, 0, 2, 3), + (0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 2, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 1, 0, 0, 1, 0, 0, 0, 0, 0, 3), + (0, 1, 0, 3, 0, 4, 0, 3, 0, 2, 4, 3, 1, 0, 3, 2, 2, 1, 3, 1, 2, 2, 3, 1, 1, 1, 2, 1, 3, 0, 1, 2, 0, 1, 3, 2, 1, 3, 0, 5, 5, 1, 0, 0, 1, 3, 2, 1, 0, 3, 0, 0, 1, 0, 0, 0, 0, 0, 3, 4, 0, 1, 1, 1, 3, 2, 0, 2, 0, 1, 0, 2, 3, 3, 1, 2, 3, 0, 1, 0, 1, 0, 4), + (0, 0, 0, 1, 0, 3, 0, 3, 0, 2, 2, 1, 0, 0, 4, 0, 3, 0, 3, 1, 3, 0, 3, 0, 3, 0, 1, 0, 3, 0, 3, 1, 3, 0, 3, 3, 0, 0, 1, 2, 1, 1, 1, 0, 1, 2, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 1, 2, 0, 0, 2, 0, 0, 0, 0, 2, 3, 3, 3, 3, 0, 0, 0, 0, 1, 4), + (0, 0, 0, 3, 0, 3, 0, 0, 0, 0, 3, 1, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 3, 0, 2, 0, 2, 3, 0, 0, 2, 2, 3, 1, 2, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 2, 0, 0, 0, 0, 2, 3), + (2, 4, 0, 5, 0, 5, 0, 4, 0, 3, 4, 3, 3, 3, 4, 3, 3, 3, 4, 3, 4, 4, 5, 4, 5, 5, 5, 2, 3, 0, 5, 5, 4, 1, 5, 4, 3, 1, 5, 4, 3, 4, 4, 3, 3, 4, 3, 3, 0, 3, 2, 0, 2, 3, 0, 3, 0, 0, 3, 3, 0, 5, 3, 2, 3, 3, 0, 3, 0, 3, 0, 3, 4, 5, 4, 5, 3, 0, 4, 3, 0, 3, 4), + (0, 3, 0, 3, 0, 3, 0, 3, 0, 3, 3, 4, 3, 2, 3, 2, 3, 0, 4, 3, 3, 3, 3, 3, 3, 3, 3, 0, 3, 2, 4, 3, 3, 1, 3, 4, 3, 4, 4, 4, 3, 4, 4, 3, 2, 4, 4, 1, 0, 2, 0, 0, 1, 1, 0, 2, 0, 0, 3, 1, 0, 5, 3, 2, 1, 3, 0, 3, 0, 1, 2, 4, 3, 2, 4, 3, 3, 0, 3, 2, 0, 4, 4), + (0, 3, 0, 3, 0, 1, 0, 0, 0, 1, 4, 3, 3, 2, 3, 1, 3, 1, 4, 2, 3, 2, 4, 2, 3, 4, 3, 0, 2, 2, 3, 3, 3, 0, 3, 3, 3, 0, 3, 4, 1, 3, 3, 
0, 3, 4, 3, 3, 0, 1, 1, 0, 1, 0, 0, 0, 4, 0, 3, 0, 0, 3, 1, 2, 1, 3, 0, 4, 0, 1, 0, 4, 3, 3, 4, 3, 3, 0, 2, 0, 0, 3, 3), + (0, 3, 0, 4, 0, 1, 0, 3, 0, 3, 4, 3, 3, 0, 3, 3, 3, 1, 3, 1, 3, 3, 4, 3, 3, 3, 0, 0, 3, 1, 5, 3, 3, 1, 3, 3, 2, 5, 4, 3, 3, 4, 5, 3, 2, 5, 3, 4, 0, 1, 0, 0, 0, 0, 0, 2, 0, 0, 1, 1, 0, 4, 2, 2, 1, 3, 0, 3, 0, 2, 0, 4, 4, 3, 5, 3, 2, 0, 1, 1, 0, 3, 4), + (0, 5, 0, 4, 0, 5, 0, 2, 0, 4, 4, 3, 3, 2, 3, 3, 3, 1, 4, 3, 4, 1, 5, 3, 4, 3, 4, 0, 4, 2, 4, 3, 4, 1, 5, 4, 0, 4, 4, 4, 4, 5, 4, 1, 3, 5, 4, 2, 1, 4, 1, 1, 3, 2, 0, 3, 1, 0, 3, 2, 1, 4, 3, 3, 3, 4, 0, 4, 0, 3, 0, 4, 4, 4, 3, 3, 3, 0, 4, 2, 0, 3, 4), + (1, 4, 0, 4, 0, 3, 0, 1, 0, 3, 3, 3, 1, 1, 3, 3, 2, 2, 3, 3, 1, 0, 3, 2, 2, 1, 2, 0, 3, 1, 2, 1, 2, 0, 3, 2, 0, 2, 2, 3, 3, 4, 3, 0, 3, 3, 1, 2, 0, 1, 1, 3, 1, 2, 0, 0, 3, 0, 1, 1, 0, 3, 2, 2, 3, 3, 0, 3, 0, 0, 0, 2, 3, 3, 4, 3, 3, 0, 1, 0, 0, 1, 4), + (0, 4, 0, 4, 0, 4, 0, 0, 0, 3, 4, 4, 3, 1, 4, 2, 3, 2, 3, 3, 3, 1, 4, 3, 4, 0, 3, 0, 4, 2, 3, 3, 2, 2, 5, 4, 2, 1, 3, 4, 3, 4, 3, 1, 3, 3, 4, 2, 0, 2, 1, 0, 3, 3, 0, 0, 2, 0, 3, 1, 0, 4, 4, 3, 4, 3, 0, 4, 0, 1, 0, 2, 4, 4, 4, 4, 4, 0, 3, 2, 0, 3, 3), + (0, 0, 0, 1, 0, 4, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 3, 2, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 2), + (0, 2, 0, 3, 0, 4, 0, 4, 0, 1, 3, 3, 3, 0, 4, 0, 2, 1, 2, 1, 1, 1, 2, 0, 3, 1, 1, 0, 1, 0, 3, 1, 0, 0, 3, 3, 2, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 2, 0, 2, 2, 0, 3, 1, 0, 0, 1, 0, 1, 1, 0, 1, 2, 0, 3, 0, 0, 0, 0, 1, 0, 0, 3, 3, 4, 3, 1, 0, 1, 0, 3, 0, 2), + (0, 0, 0, 3, 0, 5, 0, 0, 0, 0, 1, 0, 2, 0, 3, 1, 0, 1, 3, 0, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 4, 0, 0, 0, 2, 3, 0, 1, 4, 1, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 3, 0, 0, 0, 0, 0, 3), + (0, 2, 0, 5, 0, 5, 0, 1, 0, 2, 4, 3, 3, 2, 5, 1, 3, 2, 3, 3, 3, 0, 4, 1, 2, 0, 3, 0, 4, 0, 2, 2, 1, 1, 5, 
3, 0, 0, 1, 4, 2, 3, 2, 0, 3, 3, 3, 2, 0, 2, 4, 1, 1, 2, 0, 1, 1, 0, 3, 1, 0, 1, 3, 1, 2, 3, 0, 2, 0, 0, 0, 1, 3, 5, 4, 4, 4, 0, 3, 0, 0, 1, 3), + (0, 4, 0, 5, 0, 4, 0, 4, 0, 4, 5, 4, 3, 3, 4, 3, 3, 3, 4, 3, 4, 4, 5, 3, 4, 5, 4, 2, 4, 2, 3, 4, 3, 1, 4, 4, 1, 3, 5, 4, 4, 5, 5, 4, 4, 5, 5, 5, 2, 3, 3, 1, 4, 3, 1, 3, 3, 0, 3, 3, 1, 4, 3, 4, 4, 4, 0, 3, 0, 4, 0, 3, 3, 4, 4, 5, 0, 0, 4, 3, 0, 4, 5), + (0, 4, 0, 4, 0, 3, 0, 3, 0, 3, 4, 4, 4, 3, 3, 2, 4, 3, 4, 3, 4, 3, 5, 3, 4, 3, 2, 1, 4, 2, 4, 4, 3, 1, 3, 4, 2, 4, 5, 5, 3, 4, 5, 4, 1, 5, 4, 3, 0, 3, 2, 2, 3, 2, 1, 3, 1, 0, 3, 3, 3, 5, 3, 3, 3, 5, 4, 4, 2, 3, 3, 4, 3, 3, 3, 2, 1, 0, 3, 2, 1, 4, 3), + (0, 4, 0, 5, 0, 4, 0, 3, 0, 3, 5, 5, 3, 2, 4, 3, 4, 0, 5, 4, 4, 1, 4, 4, 4, 3, 3, 3, 4, 3, 5, 5, 2, 3, 3, 4, 1, 2, 5, 5, 3, 5, 5, 2, 3, 5, 5, 4, 0, 3, 2, 0, 3, 3, 1, 1, 5, 1, 4, 1, 0, 4, 3, 2, 3, 5, 0, 4, 0, 3, 0, 5, 4, 3, 4, 3, 0, 0, 4, 1, 0, 4, 4), + (1, 3, 0, 4, 0, 2, 0, 2, 0, 2, 5, 5, 3, 3, 3, 3, 3, 0, 4, 2, 3, 4, 4, 4, 3, 4, 0, 0, 3, 4, 5, 4, 3, 3, 3, 3, 2, 5, 5, 4, 5, 5, 5, 4, 3, 5, 5, 5, 1, 3, 1, 0, 1, 0, 0, 3, 2, 0, 4, 2, 0, 5, 2, 3, 2, 4, 1, 3, 0, 3, 0, 4, 5, 4, 5, 4, 3, 0, 4, 2, 0, 5, 4), + (0, 3, 0, 4, 0, 5, 0, 3, 0, 3, 4, 4, 3, 2, 3, 2, 3, 3, 3, 3, 3, 2, 4, 3, 3, 2, 2, 0, 3, 3, 3, 3, 3, 1, 3, 3, 3, 0, 4, 4, 3, 4, 4, 1, 1, 4, 4, 2, 0, 3, 1, 0, 1, 1, 0, 4, 1, 0, 2, 3, 1, 3, 3, 1, 3, 4, 0, 3, 0, 1, 0, 3, 1, 3, 0, 0, 1, 0, 2, 0, 0, 4, 4), + (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), + (0, 3, 0, 3, 0, 2, 0, 3, 0, 1, 5, 4, 3, 3, 3, 1, 4, 2, 1, 2, 3, 4, 4, 2, 4, 4, 5, 0, 3, 1, 4, 3, 4, 0, 4, 3, 3, 3, 2, 3, 2, 5, 3, 4, 3, 2, 2, 3, 0, 0, 3, 0, 2, 1, 0, 1, 2, 0, 0, 0, 0, 2, 1, 1, 3, 1, 0, 2, 0, 4, 0, 3, 4, 4, 4, 5, 2, 0, 2, 0, 0, 1, 3), + (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 
0, 0, 0, 4, 2, 1, 1, 0, 1, 0, 3, 2, 0, 0, 3, 1, 1, 1, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 1, 0, 0, 0, 2, 0, 0, 0, 1, 4, 0, 4, 2, 1, 0, 0, 0, 0, 0, 1), + (0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 2, 0, 2, 1, 0, 0, 1, 2, 1, 0, 1, 1, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 1, 0, 0, 0, 0, 0, 1, 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 2), + (0, 4, 0, 4, 0, 4, 0, 3, 0, 4, 4, 3, 4, 2, 4, 3, 2, 0, 4, 4, 4, 3, 5, 3, 5, 3, 3, 2, 4, 2, 4, 3, 4, 3, 1, 4, 0, 2, 3, 4, 4, 4, 3, 3, 3, 4, 4, 4, 3, 4, 1, 3, 4, 3, 2, 1, 2, 1, 3, 3, 3, 4, 4, 3, 3, 5, 0, 4, 0, 3, 0, 4, 3, 3, 3, 2, 1, 0, 3, 0, 0, 3, 3), + (0, 4, 0, 3, 0, 3, 0, 3, 0, 3, 5, 5, 3, 3, 3, 3, 4, 3, 4, 3, 3, 3, 4, 4, 4, 3, 3, 3, 3, 4, 3, 5, 3, 3, 1, 3, 2, 4, 5, 5, 5, 5, 4, 3, 4, 5, 5, 3, 2, 2, 3, 3, 3, 3, 2, 3, 3, 1, 2, 3, 2, 4, 3, 3, 3, 4, 0, 4, 0, 2, 0, 4, 3, 2, 2, 1, 2, 0, 3, 0, 0, 4, 1)) + +class JapaneseContextAnalysis(object): + NUM_OF_CATEGORY = 6 + DONT_KNOW = -1 + ENOUGH_REL_THRESHOLD = 100 + MAX_REL_THRESHOLD = 1000 + MINIMUM_DATA_THRESHOLD = 4 + + def __init__(self): + self._total_rel = None + self._rel_sample = None + self._need_to_skip_char_num = None + self._last_char_order = None + self._done = None + self.reset() + + def reset(self): + self._total_rel = 0 + self._rel_sample = [ + 0] * self.NUM_OF_CATEGORY + self._need_to_skip_char_num = 0 + self._last_char_order = -1 + self._done = False + + def feed(self, byte_str, num_bytes): + if self._done: + return + i = self._need_to_skip_char_num + while i < num_bytes: + order, char_len = self.get_order(byte_str[i[:i + 2]]) + i += char_len + if i > num_bytes: + self._need_to_skip_char_num = i - num_bytes + self._last_char_order = -1 + elif order != -1: + if self._last_char_order != -1: + self._total_rel += 1 + if self._total_rel > self.MAX_REL_THRESHOLD: + self._done = True + break + self._rel_sample[jp2CharContext[self._last_char_order][order]] += 1 + self._last_char_order = order + + 
def got_enough_data(self): + return self._total_rel > self.ENOUGH_REL_THRESHOLD + + def get_confidence(self): + if self._total_rel > self.MINIMUM_DATA_THRESHOLD: + return (self._total_rel - self._rel_sample[0]) / self._total_rel + return self.DONT_KNOW + + def get_order(self, byte_str): + return (-1, 1) + + +class SJISContextAnalysis(JapaneseContextAnalysis): + + def __init__(self): + super(SJISContextAnalysis, self).__init__() + self._charset_name = "SHIFT_JIS" + + @property + def charset_name(self): + return self._charset_name + + def get_orderParse error at or near `COME_FROM' instruction at offset 106_0 + + +class EUCJPContextAnalysis(JapaneseContextAnalysis): + + def get_order(self, byte_str): + if not byte_str: + return (-1, 1) + else: + first_char = byte_str[0] + if not first_char == 142: + if 161 <= first_char <= 254: + char_len = 2 + elif first_char == 143: + char_len = 3 + else: + char_len = 1 + if len(byte_str) > 1: + second_char = byte_str[1] + if first_char == 164: + if 161 <= second_char <= 243: + return ( + second_char - 161, char_len) + return ( + -1, char_len) \ No newline at end of file diff --git a/APPS_UNCOMPILED/lib/chardet/langbulgarianmodel.py b/APPS_UNCOMPILED/lib/chardet/langbulgarianmodel.py new file mode 100644 index 0000000..7a518f0 --- /dev/null +++ b/APPS_UNCOMPILED/lib/chardet/langbulgarianmodel.py @@ -0,0 +1,4566 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/langbulgarianmodel.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 105685 bytes +from chardet.sbcharsetprober import SingleByteCharSetModel +BULGARIAN_LANG_MODEL = {63:{ + 63: 1, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 
51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 0, + 18: 1, + 9: 1, + 20: 1, + 11: 1, + 3: 1, + 23: 1, + 15: 1, + 2: 0, + 26: 1, + 12: 1, + 10: 1, + 14: 1, + 6: 1, + 4: 1, + 13: 1, + 7: 1, + 8: 1, + 5: 1, + 19: 0, + 29: 1, + 25: 1, + 22: 0, + 21: 1, + 27: 1, + 24: 1, + 17: 0, + 52: 0, + 42: 0, + 16: 1, + 58: 0, + 62: 0}, + 45:{ + 63: 0, + 45: 0, + 31: 0, + 32: 1, + 35: 1, + 43: 0, + 37: 1, + 44: 0, + 55: 0, + 47: 0, + 40: 1, + 59: 0, + 33: 1, + 46: 0, + 38: 1, + 36: 0, + 41: 1, + 30: 1, + 39: 1, + 28: 1, + 34: 0, + 51: 0, + 48: 0, + 49: 1, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 0, + 18: 0, + 9: 0, + 20: 0, + 11: 0, + 3: 0, + 23: 0, + 15: 0, + 2: 0, + 26: 0, + 12: 0, + 10: 0, + 14: 0, + 6: 0, + 4: 0, + 13: 0, + 7: 0, + 8: 0, + 5: 0, + 19: 0, + 29: 0, + 25: 0, + 22: 0, + 21: 0, + 27: 0, + 24: 0, + 17: 0, + 52: 0, + 42: 0, + 16: 0, + 58: 0, + 62: 0}, + 31:{ + 63: 0, + 45: 1, + 31: 1, + 32: 1, + 35: 2, + 43: 1, + 37: 2, + 44: 2, + 55: 1, + 47: 2, + 40: 1, + 59: 1, + 33: 1, + 46: 2, + 38: 1, + 36: 2, + 41: 1, + 30: 2, + 39: 2, + 28: 2, + 34: 2, + 51: 1, + 48: 2, + 49: 1, + 53: 1, + 50: 1, + 54: 1, + 57: 2, + 61: 0, + 60: 0, + 56: 1, + 1: 1, + 18: 2, + 9: 2, + 20: 2, + 11: 2, + 3: 1, + 23: 1, + 15: 2, + 2: 0, + 26: 2, + 12: 2, + 10: 3, + 14: 2, + 6: 3, + 4: 0, + 13: 2, + 7: 2, + 8: 2, + 5: 2, + 19: 1, + 29: 2, + 25: 1, + 22: 1, + 21: 1, + 27: 1, + 24: 0, + 17: 0, + 52: 0, + 42: 0, + 16: 1, + 58: 0, + 62: 0}, + 32:{ + 63: 0, + 45: 0, + 31: 2, + 32: 2, + 35: 1, + 43: 1, + 37: 2, + 44: 1, + 55: 1, + 47: 2, + 40: 1, + 59: 0, + 33: 1, + 46: 1, + 38: 1, + 36: 2, + 41: 2, + 30: 1, + 39: 1, + 28: 2, + 34: 2, + 51: 1, + 48: 2, + 49: 1, + 53: 1, + 50: 1, + 54: 0, + 57: 1, + 61: 2, + 60: 1, + 56: 1, + 1: 3, + 18: 0, + 9: 0, + 20: 0, + 11: 1, + 3: 3, + 23: 0, + 15: 0, + 2: 2, + 26: 0, + 12: 0, + 10: 2, + 14: 0, + 6: 0, + 4: 3, + 13: 0, + 7: 2, + 8: 1, + 5: 0, + 19: 2, + 29: 0, + 25: 1, + 22: 0, + 21: 0, + 27: 
0, + 24: 0, + 17: 3, + 52: 1, + 42: 1, + 16: 2, + 58: 0, + 62: 0}, + 35:{ + 63: 0, + 45: 0, + 31: 2, + 32: 1, + 35: 1, + 43: 0, + 37: 1, + 44: 2, + 55: 0, + 47: 0, + 40: 2, + 59: 0, + 33: 1, + 46: 1, + 38: 1, + 36: 1, + 41: 1, + 30: 1, + 39: 2, + 28: 2, + 34: 1, + 51: 1, + 48: 2, + 49: 0, + 53: 1, + 50: 0, + 54: 0, + 57: 0, + 61: 1, + 60: 1, + 56: 2, + 1: 3, + 18: 1, + 9: 0, + 20: 0, + 11: 1, + 3: 3, + 23: 1, + 15: 2, + 2: 3, + 26: 0, + 12: 1, + 10: 2, + 14: 1, + 6: 2, + 4: 2, + 13: 1, + 7: 2, + 8: 2, + 5: 2, + 19: 1, + 29: 0, + 25: 1, + 22: 0, + 21: 2, + 27: 0, + 24: 0, + 17: 2, + 52: 1, + 42: 1, + 16: 1, + 58: 0, + 62: 0}, + 43:{ + 63: 0, + 45: 0, + 31: 2, + 32: 1, + 35: 0, + 43: 0, + 37: 1, + 44: 2, + 55: 0, + 47: 1, + 40: 1, + 59: 0, + 33: 1, + 46: 1, + 38: 0, + 36: 1, + 41: 1, + 30: 0, + 39: 1, + 28: 1, + 34: 0, + 51: 1, + 48: 1, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 1, + 61: 1, + 60: 0, + 56: 0, + 1: 2, + 18: 1, + 9: 1, + 20: 0, + 11: 1, + 3: 3, + 23: 1, + 15: 0, + 2: 2, + 26: 0, + 12: 1, + 10: 2, + 14: 1, + 6: 1, + 4: 2, + 13: 0, + 7: 2, + 8: 0, + 5: 0, + 19: 2, + 29: 0, + 25: 0, + 22: 0, + 21: 0, + 27: 0, + 24: 1, + 17: 2, + 52: 1, + 42: 1, + 16: 1, + 58: 0, + 62: 0}, + 37:{ + 63: 0, + 45: 0, + 31: 2, + 32: 1, + 35: 2, + 43: 1, + 37: 2, + 44: 2, + 55: 2, + 47: 1, + 40: 2, + 59: 0, + 33: 1, + 46: 1, + 38: 1, + 36: 1, + 41: 2, + 30: 2, + 39: 1, + 28: 2, + 34: 1, + 51: 1, + 48: 1, + 49: 0, + 53: 1, + 50: 1, + 54: 0, + 57: 0, + 61: 1, + 60: 1, + 56: 1, + 1: 3, + 18: 0, + 9: 2, + 20: 0, + 11: 0, + 3: 3, + 23: 3, + 15: 1, + 2: 3, + 26: 0, + 12: 0, + 10: 1, + 14: 1, + 6: 2, + 4: 3, + 13: 0, + 7: 2, + 8: 0, + 5: 0, + 19: 2, + 29: 0, + 25: 0, + 22: 0, + 21: 0, + 27: 0, + 24: 0, + 17: 2, + 52: 1, + 42: 2, + 16: 1, + 58: 0, + 62: 0}, + 44:{ + 63: 0, + 45: 0, + 31: 1, + 32: 1, + 35: 2, + 43: 1, + 37: 1, + 44: 1, + 55: 1, + 47: 1, + 40: 1, + 59: 1, + 33: 2, + 46: 2, + 38: 1, + 36: 2, + 41: 2, + 30: 1, + 39: 2, + 28: 2, + 34: 2, + 51: 1, + 48: 2, + 49: 1, + 53: 2, + 
50: 1, + 54: 1, + 57: 1, + 61: 0, + 60: 0, + 56: 1, + 1: 0, + 18: 1, + 9: 2, + 20: 1, + 11: 2, + 3: 0, + 23: 1, + 15: 1, + 2: 0, + 26: 1, + 12: 2, + 10: 2, + 14: 2, + 6: 2, + 4: 0, + 13: 1, + 7: 2, + 8: 2, + 5: 1, + 19: 1, + 29: 1, + 25: 1, + 22: 0, + 21: 1, + 27: 1, + 24: 1, + 17: 1, + 52: 0, + 42: 1, + 16: 1, + 58: 0, + 62: 0}, + 55:{ + 63: 0, + 45: 0, + 31: 1, + 32: 0, + 35: 1, + 43: 0, + 37: 1, + 44: 1, + 55: 0, + 47: 0, + 40: 1, + 59: 0, + 33: 1, + 46: 0, + 38: 0, + 36: 1, + 41: 1, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 1, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 2, + 18: 0, + 9: 0, + 20: 0, + 11: 1, + 3: 2, + 23: 0, + 15: 0, + 2: 2, + 26: 0, + 12: 0, + 10: 0, + 14: 0, + 6: 0, + 4: 2, + 13: 1, + 7: 1, + 8: 0, + 5: 0, + 19: 1, + 29: 0, + 25: 0, + 22: 0, + 21: 0, + 27: 0, + 24: 0, + 17: 1, + 52: 1, + 42: 1, + 16: 0, + 58: 0, + 62: 0}, + 47:{ + 63: 0, + 45: 0, + 31: 2, + 32: 1, + 35: 1, + 43: 1, + 37: 1, + 44: 1, + 55: 0, + 47: 1, + 40: 1, + 59: 0, + 33: 1, + 46: 1, + 38: 1, + 36: 2, + 41: 1, + 30: 1, + 39: 1, + 28: 1, + 34: 1, + 51: 1, + 48: 0, + 49: 1, + 53: 1, + 50: 0, + 54: 0, + 57: 0, + 61: 1, + 60: 0, + 56: 1, + 1: 3, + 18: 1, + 9: 2, + 20: 1, + 11: 2, + 3: 2, + 23: 0, + 15: 0, + 2: 1, + 26: 0, + 12: 0, + 10: 2, + 14: 1, + 6: 1, + 4: 1, + 13: 0, + 7: 1, + 8: 0, + 5: 0, + 19: 1, + 29: 0, + 25: 0, + 22: 0, + 21: 0, + 27: 0, + 24: 0, + 17: 1, + 52: 0, + 42: 1, + 16: 0, + 58: 0, + 62: 0}, + 40:{ + 63: 0, + 45: 1, + 31: 1, + 32: 1, + 35: 1, + 43: 1, + 37: 1, + 44: 2, + 55: 1, + 47: 2, + 40: 1, + 59: 1, + 33: 2, + 46: 2, + 38: 2, + 36: 2, + 41: 1, + 30: 1, + 39: 2, + 28: 2, + 34: 2, + 51: 0, + 48: 1, + 49: 1, + 53: 1, + 50: 1, + 54: 1, + 57: 1, + 61: 0, + 60: 0, + 56: 2, + 1: 1, + 18: 1, + 9: 3, + 20: 2, + 11: 1, + 3: 1, + 23: 0, + 15: 3, + 2: 0, + 26: 1, + 12: 1, + 10: 2, + 14: 2, + 6: 2, + 4: 0, + 13: 1, + 7: 2, + 8: 2, + 5: 2, + 19: 0, + 29: 1, + 25: 1, + 22: 1, + 21: 1, + 27: 1, + 24: 1, + 17: 0, + 52: 0, + 42: 
0, + 16: 0, + 58: 0, + 62: 0}, + 59:{ + 63: 0, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 1, + 44: 1, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 1, + 46: 1, + 38: 1, + 36: 1, + 41: 1, + 30: 0, + 39: 0, + 28: 1, + 34: 1, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 1, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 1, + 1: 0, + 18: 0, + 9: 0, + 20: 0, + 11: 0, + 3: 1, + 23: 0, + 15: 0, + 2: 0, + 26: 0, + 12: 0, + 10: 0, + 14: 0, + 6: 0, + 4: 2, + 13: 0, + 7: 0, + 8: 0, + 5: 0, + 19: 0, + 29: 0, + 25: 0, + 22: 0, + 21: 0, + 27: 0, + 24: 0, + 17: 1, + 52: 0, + 42: 0, + 16: 0, + 58: 0, + 62: 0}, + 33:{ + 63: 0, + 45: 1, + 31: 2, + 32: 1, + 35: 1, + 43: 1, + 37: 1, + 44: 1, + 55: 0, + 47: 1, + 40: 2, + 59: 0, + 33: 1, + 46: 1, + 38: 0, + 36: 2, + 41: 2, + 30: 2, + 39: 1, + 28: 2, + 34: 1, + 51: 1, + 48: 1, + 49: 1, + 53: 1, + 50: 0, + 54: 0, + 57: 0, + 61: 1, + 60: 1, + 56: 0, + 1: 3, + 18: 0, + 9: 1, + 20: 0, + 11: 0, + 3: 2, + 23: 1, + 15: 0, + 2: 2, + 26: 0, + 12: 0, + 10: 2, + 14: 1, + 6: 2, + 4: 3, + 13: 0, + 7: 3, + 8: 1, + 5: 0, + 19: 2, + 29: 0, + 25: 1, + 22: 0, + 21: 0, + 27: 1, + 24: 0, + 17: 2, + 52: 1, + 42: 2, + 16: 0, + 58: 0, + 62: 0}, + 46:{ + 63: 1, + 45: 0, + 31: 2, + 32: 1, + 35: 1, + 43: 2, + 37: 1, + 44: 2, + 55: 0, + 47: 1, + 40: 2, + 59: 0, + 33: 1, + 46: 1, + 38: 0, + 36: 1, + 41: 2, + 30: 1, + 39: 0, + 28: 1, + 34: 1, + 51: 1, + 48: 0, + 49: 1, + 53: 1, + 50: 1, + 54: 0, + 57: 0, + 61: 1, + 60: 1, + 56: 1, + 1: 2, + 18: 0, + 9: 1, + 20: 0, + 11: 0, + 3: 3, + 23: 0, + 15: 0, + 2: 2, + 26: 0, + 12: 0, + 10: 0, + 14: 0, + 6: 0, + 4: 2, + 13: 0, + 7: 0, + 8: 0, + 5: 0, + 19: 2, + 29: 0, + 25: 0, + 22: 0, + 21: 0, + 27: 0, + 24: 0, + 17: 1, + 52: 1, + 42: 2, + 16: 1, + 58: 0, + 62: 0}, + 38:{ + 63: 0, + 45: 0, + 31: 2, + 32: 1, + 35: 2, + 43: 0, + 37: 1, + 44: 1, + 55: 0, + 47: 1, + 40: 2, + 59: 0, + 33: 1, + 46: 1, + 38: 1, + 36: 1, + 41: 2, + 30: 1, + 39: 1, + 28: 2, + 34: 1, + 51: 1, + 48: 1, + 49: 0, + 53: 1, + 50: 0, + 54: 0, + 57: 0, + 61: 1, + 
60: 0, + 56: 1, + 1: 3, + 18: 0, + 9: 0, + 20: 0, + 11: 0, + 3: 3, + 23: 0, + 15: 0, + 2: 3, + 26: 0, + 12: 0, + 10: 2, + 14: 0, + 6: 2, + 4: 3, + 13: 0, + 7: 1, + 8: 0, + 5: 0, + 19: 2, + 29: 0, + 25: 0, + 22: 0, + 21: 0, + 27: 0, + 24: 0, + 17: 2, + 52: 1, + 42: 2, + 16: 1, + 58: 0, + 62: 0}, + 36:{ + 63: 0, + 45: 0, + 31: 2, + 32: 2, + 35: 1, + 43: 1, + 37: 2, + 44: 2, + 55: 1, + 47: 1, + 40: 2, + 59: 1, + 33: 2, + 46: 1, + 38: 1, + 36: 1, + 41: 2, + 30: 1, + 39: 1, + 28: 2, + 34: 2, + 51: 1, + 48: 1, + 49: 1, + 53: 1, + 50: 1, + 54: 1, + 57: 0, + 61: 1, + 60: 1, + 56: 1, + 1: 3, + 18: 0, + 9: 0, + 20: 1, + 11: 0, + 3: 3, + 23: 0, + 15: 0, + 2: 3, + 26: 0, + 12: 0, + 10: 0, + 14: 0, + 6: 0, + 4: 3, + 13: 0, + 7: 0, + 8: 0, + 5: 1, + 19: 1, + 29: 0, + 25: 0, + 22: 0, + 21: 0, + 27: 1, + 24: 0, + 17: 0, + 52: 0, + 42: 2, + 16: 2, + 58: 0, + 62: 0}, + 41:{ + 63: 0, + 45: 0, + 31: 1, + 32: 1, + 35: 2, + 43: 1, + 37: 2, + 44: 1, + 55: 1, + 47: 1, + 40: 1, + 59: 1, + 33: 2, + 46: 2, + 38: 2, + 36: 2, + 41: 2, + 30: 1, + 39: 2, + 28: 2, + 34: 2, + 51: 1, + 48: 1, + 49: 1, + 53: 0, + 50: 1, + 54: 1, + 57: 1, + 61: 0, + 60: 0, + 56: 1, + 1: 1, + 18: 2, + 9: 2, + 20: 2, + 11: 1, + 3: 1, + 23: 1, + 15: 1, + 2: 0, + 26: 1, + 12: 2, + 10: 2, + 14: 1, + 6: 1, + 4: 0, + 13: 2, + 7: 2, + 8: 2, + 5: 3, + 19: 1, + 29: 1, + 25: 1, + 22: 1, + 21: 2, + 27: 0, + 24: 2, + 17: 0, + 52: 0, + 42: 0, + 16: 1, + 58: 0, + 62: 0}, + 30:{ + 63: 0, + 45: 1, + 31: 2, + 32: 1, + 35: 1, + 43: 1, + 37: 1, + 44: 1, + 55: 0, + 47: 1, + 40: 2, + 59: 0, + 33: 1, + 46: 1, + 38: 1, + 36: 1, + 41: 2, + 30: 2, + 39: 2, + 28: 2, + 34: 1, + 51: 2, + 48: 1, + 49: 0, + 53: 1, + 50: 1, + 54: 1, + 57: 0, + 61: 1, + 60: 1, + 56: 0, + 1: 3, + 18: 0, + 9: 0, + 20: 0, + 11: 2, + 3: 3, + 23: 0, + 15: 0, + 2: 2, + 26: 0, + 12: 1, + 10: 3, + 14: 0, + 6: 1, + 4: 3, + 13: 0, + 7: 3, + 8: 1, + 5: 1, + 19: 2, + 29: 1, + 25: 1, + 22: 0, + 21: 1, + 27: 1, + 24: 0, + 17: 2, + 52: 1, + 42: 1, + 16: 1, + 58: 0, + 62: 0}, + 
39:{ + 63: 0, + 45: 1, + 31: 2, + 32: 1, + 35: 1, + 43: 2, + 37: 2, + 44: 2, + 55: 0, + 47: 1, + 40: 2, + 59: 0, + 33: 1, + 46: 0, + 38: 1, + 36: 1, + 41: 2, + 30: 2, + 39: 1, + 28: 1, + 34: 1, + 51: 1, + 48: 1, + 49: 1, + 53: 1, + 50: 1, + 54: 0, + 57: 0, + 61: 1, + 60: 1, + 56: 1, + 1: 3, + 18: 0, + 9: 0, + 20: 0, + 11: 0, + 3: 2, + 23: 0, + 15: 0, + 2: 2, + 26: 0, + 12: 0, + 10: 0, + 14: 0, + 6: 1, + 4: 3, + 13: 0, + 7: 0, + 8: 1, + 5: 0, + 19: 3, + 29: 0, + 25: 0, + 22: 0, + 21: 0, + 27: 0, + 24: 0, + 17: 1, + 52: 0, + 42: 1, + 16: 1, + 58: 0, + 62: 0}, + 28:{ + 63: 1, + 45: 0, + 31: 3, + 32: 2, + 35: 2, + 43: 1, + 37: 2, + 44: 2, + 55: 1, + 47: 1, + 40: 2, + 59: 0, + 33: 2, + 46: 1, + 38: 1, + 36: 1, + 41: 2, + 30: 2, + 39: 1, + 28: 2, + 34: 2, + 51: 1, + 48: 1, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 1, + 60: 1, + 56: 1, + 1: 3, + 18: 1, + 9: 2, + 20: 1, + 11: 1, + 3: 3, + 23: 0, + 15: 0, + 2: 3, + 26: 0, + 12: 2, + 10: 3, + 14: 2, + 6: 1, + 4: 3, + 13: 3, + 7: 2, + 8: 0, + 5: 3, + 19: 2, + 29: 2, + 25: 1, + 22: 1, + 21: 1, + 27: 0, + 24: 0, + 17: 3, + 52: 1, + 42: 1, + 16: 1, + 58: 0, + 62: 0}, + 34:{ + 63: 0, + 45: 0, + 31: 2, + 32: 2, + 35: 1, + 43: 0, + 37: 1, + 44: 2, + 55: 0, + 47: 0, + 40: 2, + 59: 0, + 33: 2, + 46: 1, + 38: 1, + 36: 1, + 41: 2, + 30: 1, + 39: 2, + 28: 2, + 34: 1, + 51: 1, + 48: 1, + 49: 0, + 53: 1, + 50: 0, + 54: 0, + 57: 0, + 61: 1, + 60: 0, + 56: 1, + 1: 3, + 18: 1, + 9: 1, + 20: 0, + 11: 0, + 3: 3, + 23: 0, + 15: 0, + 2: 2, + 26: 0, + 12: 1, + 10: 1, + 14: 0, + 6: 0, + 4: 3, + 13: 0, + 7: 3, + 8: 0, + 5: 0, + 19: 2, + 29: 0, + 25: 0, + 22: 0, + 21: 0, + 27: 0, + 24: 0, + 17: 2, + 52: 0, + 42: 1, + 16: 2, + 58: 0, + 62: 0}, + 51:{ + 63: 0, + 45: 1, + 31: 1, + 32: 1, + 35: 1, + 43: 1, + 37: 1, + 44: 2, + 55: 1, + 47: 1, + 40: 1, + 59: 0, + 33: 1, + 46: 1, + 38: 1, + 36: 1, + 41: 0, + 30: 1, + 39: 1, + 28: 1, + 34: 2, + 51: 0, + 48: 1, + 49: 1, + 53: 1, + 50: 1, + 54: 1, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 1, + 18: 1, 
+ 9: 2, + 20: 1, + 11: 1, + 3: 2, + 23: 1, + 15: 1, + 2: 2, + 26: 1, + 12: 2, + 10: 1, + 14: 1, + 6: 2, + 4: 2, + 13: 1, + 7: 1, + 8: 2, + 5: 1, + 19: 1, + 29: 0, + 25: 1, + 22: 0, + 21: 2, + 27: 1, + 24: 0, + 17: 1, + 52: 0, + 42: 0, + 16: 0, + 58: 0, + 62: 0}, + 48:{ + 63: 0, + 45: 0, + 31: 2, + 32: 1, + 35: 1, + 43: 0, + 37: 0, + 44: 1, + 55: 0, + 47: 0, + 40: 2, + 59: 0, + 33: 1, + 46: 1, + 38: 0, + 36: 1, + 41: 1, + 30: 2, + 39: 1, + 28: 2, + 34: 1, + 51: 1, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 2, + 18: 0, + 9: 0, + 20: 0, + 11: 0, + 3: 2, + 23: 0, + 15: 0, + 2: 2, + 26: 0, + 12: 0, + 10: 2, + 14: 0, + 6: 0, + 4: 2, + 13: 0, + 7: 2, + 8: 0, + 5: 0, + 19: 1, + 29: 0, + 25: 0, + 22: 0, + 21: 0, + 27: 0, + 24: 0, + 17: 1, + 52: 1, + 42: 1, + 16: 0, + 58: 0, + 62: 0}, + 49:{ + 63: 0, + 45: 0, + 31: 1, + 32: 0, + 35: 1, + 43: 1, + 37: 1, + 44: 1, + 55: 0, + 47: 0, + 40: 1, + 59: 0, + 33: 0, + 46: 1, + 38: 1, + 36: 1, + 41: 1, + 30: 1, + 39: 1, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 1, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 2, + 18: 0, + 9: 1, + 20: 0, + 11: 0, + 3: 2, + 23: 0, + 15: 0, + 2: 2, + 26: 0, + 12: 0, + 10: 1, + 14: 1, + 6: 0, + 4: 2, + 13: 0, + 7: 2, + 8: 0, + 5: 0, + 19: 2, + 29: 0, + 25: 0, + 22: 0, + 21: 0, + 27: 0, + 24: 0, + 17: 2, + 52: 1, + 42: 1, + 16: 0, + 58: 0, + 62: 0}, + 53:{ + 63: 0, + 45: 0, + 31: 1, + 32: 0, + 35: 1, + 43: 0, + 37: 0, + 44: 1, + 55: 0, + 47: 0, + 40: 2, + 59: 0, + 33: 2, + 46: 1, + 38: 1, + 36: 0, + 41: 0, + 30: 0, + 39: 1, + 28: 2, + 34: 0, + 51: 1, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 2, + 18: 0, + 9: 2, + 20: 0, + 11: 0, + 3: 2, + 23: 0, + 15: 1, + 2: 2, + 26: 0, + 12: 0, + 10: 0, + 14: 0, + 6: 0, + 4: 1, + 13: 0, + 7: 1, + 8: 0, + 5: 0, + 19: 1, + 29: 0, + 25: 0, + 22: 0, + 21: 0, + 27: 0, + 24: 0, + 17: 1, + 52: 0, + 42: 1, + 16: 1, + 58: 0, + 62: 0}, + 50:{ + 63: 0, + 45: 0, + 31: 2, + 
32: 1, + 35: 0, + 43: 0, + 37: 0, + 44: 1, + 55: 0, + 47: 1, + 40: 1, + 59: 0, + 33: 1, + 46: 1, + 38: 0, + 36: 1, + 41: 1, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 1, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 2, + 18: 0, + 9: 0, + 20: 0, + 11: 0, + 3: 3, + 23: 1, + 15: 0, + 2: 2, + 26: 0, + 12: 0, + 10: 1, + 14: 0, + 6: 0, + 4: 2, + 13: 0, + 7: 1, + 8: 0, + 5: 0, + 19: 2, + 29: 0, + 25: 0, + 22: 0, + 21: 0, + 27: 0, + 24: 0, + 17: 1, + 52: 1, + 42: 0, + 16: 0, + 58: 0, + 62: 0}, + 54:{ + 63: 0, + 45: 0, + 31: 1, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 1, + 55: 0, + 47: 1, + 40: 1, + 59: 0, + 33: 1, + 46: 0, + 38: 0, + 36: 1, + 41: 1, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 1, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 2, + 18: 0, + 9: 2, + 20: 0, + 11: 0, + 3: 2, + 23: 0, + 15: 0, + 2: 2, + 26: 0, + 12: 1, + 10: 1, + 14: 1, + 6: 1, + 4: 2, + 13: 1, + 7: 1, + 8: 0, + 5: 0, + 19: 2, + 29: 0, + 25: 0, + 22: 0, + 21: 1, + 27: 0, + 24: 0, + 17: 1, + 52: 1, + 42: 0, + 16: 0, + 58: 0, + 62: 0}, + 57:{ + 63: 0, + 45: 0, + 31: 1, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 1, + 55: 0, + 47: 0, + 40: 1, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 1, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 2, + 18: 0, + 9: 0, + 20: 0, + 11: 0, + 3: 2, + 23: 0, + 15: 0, + 2: 1, + 26: 0, + 12: 0, + 10: 0, + 14: 0, + 6: 0, + 4: 1, + 13: 0, + 7: 1, + 8: 0, + 5: 0, + 19: 1, + 29: 0, + 25: 0, + 22: 0, + 21: 0, + 27: 0, + 24: 0, + 17: 1, + 52: 0, + 42: 0, + 16: 1, + 58: 0, + 62: 0}, + 61:{ + 63: 0, + 45: 0, + 31: 0, + 32: 1, + 35: 1, + 43: 0, + 37: 1, + 44: 0, + 55: 1, + 47: 1, + 40: 0, + 59: 0, + 33: 1, + 46: 2, + 38: 1, + 36: 1, + 41: 0, + 30: 1, + 39: 2, + 28: 1, + 34: 1, + 51: 0, + 48: 0, + 49: 1, + 53: 1, + 50: 1, + 54: 1, + 57: 1, + 61: 0, + 60: 0, + 56: 0, + 1: 0, + 18: 0, + 9: 0, + 20: 0, + 11: 0, + 3: 0, 
+ 23: 0, + 15: 0, + 2: 0, + 26: 0, + 12: 0, + 10: 1, + 14: 0, + 6: 1, + 4: 0, + 13: 0, + 7: 1, + 8: 0, + 5: 0, + 19: 0, + 29: 0, + 25: 0, + 22: 0, + 21: 0, + 27: 0, + 24: 0, + 17: 0, + 52: 0, + 42: 0, + 16: 0, + 58: 0, + 62: 0}, + 60:{ + 63: 0, + 45: 0, + 31: 1, + 32: 1, + 35: 0, + 43: 1, + 37: 1, + 44: 0, + 55: 1, + 47: 0, + 40: 0, + 59: 0, + 33: 1, + 46: 1, + 38: 0, + 36: 1, + 41: 0, + 30: 0, + 39: 1, + 28: 1, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 0, + 18: 1, + 9: 1, + 20: 2, + 11: 1, + 3: 0, + 23: 2, + 15: 1, + 2: 1, + 26: 0, + 12: 1, + 10: 1, + 14: 1, + 6: 1, + 4: 0, + 13: 1, + 7: 1, + 8: 1, + 5: 1, + 19: 0, + 29: 0, + 25: 1, + 22: 0, + 21: 0, + 27: 0, + 24: 0, + 17: 0, + 52: 0, + 42: 0, + 16: 0, + 58: 0, + 62: 0}, + 56:{ + 63: 0, + 45: 0, + 31: 0, + 32: 1, + 35: 1, + 43: 1, + 37: 1, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 1, + 46: 1, + 38: 1, + 36: 1, + 41: 0, + 30: 0, + 39: 0, + 28: 1, + 34: 2, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 0, + 18: 1, + 9: 1, + 20: 1, + 11: 1, + 3: 0, + 23: 0, + 15: 1, + 2: 1, + 26: 1, + 12: 1, + 10: 1, + 14: 2, + 6: 2, + 4: 0, + 13: 2, + 7: 1, + 8: 1, + 5: 1, + 19: 0, + 29: 0, + 25: 1, + 22: 0, + 21: 0, + 27: 1, + 24: 0, + 17: 0, + 52: 0, + 42: 1, + 16: 0, + 58: 0, + 62: 0}, + 1:{ + 63: 1, + 45: 1, + 31: 1, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 1, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 1, + 18: 3, + 9: 3, + 20: 3, + 11: 3, + 3: 3, + 23: 3, + 15: 3, + 2: 3, + 26: 3, + 12: 3, + 10: 3, + 14: 3, + 6: 3, + 4: 2, + 13: 3, + 7: 3, + 8: 3, + 5: 3, + 19: 3, + 29: 3, + 25: 3, + 22: 3, + 21: 3, + 27: 3, + 24: 3, + 17: 0, + 52: 0, + 42: 1, + 16: 3, + 58: 0, + 62: 0}, + 18:{ + 63: 1, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 
44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 3, + 18: 0, + 9: 3, + 20: 1, + 11: 2, + 3: 3, + 23: 1, + 15: 1, + 2: 3, + 26: 0, + 12: 1, + 10: 3, + 14: 2, + 6: 3, + 4: 3, + 13: 1, + 7: 3, + 8: 3, + 5: 0, + 19: 3, + 29: 0, + 25: 2, + 22: 1, + 21: 1, + 27: 1, + 24: 3, + 17: 3, + 52: 1, + 42: 2, + 16: 3, + 58: 0, + 62: 0}, + 9:{ + 63: 1, + 45: 1, + 31: 0, + 32: 1, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 1, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 3, + 18: 1, + 9: 0, + 20: 2, + 11: 3, + 3: 3, + 23: 1, + 15: 3, + 2: 3, + 26: 0, + 12: 3, + 10: 3, + 14: 2, + 6: 3, + 4: 3, + 13: 2, + 7: 3, + 8: 3, + 5: 3, + 19: 2, + 29: 0, + 25: 2, + 22: 2, + 21: 3, + 27: 2, + 24: 1, + 17: 3, + 52: 1, + 42: 2, + 16: 3, + 58: 0, + 62: 0}, + 20:{ + 63: 0, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 3, + 18: 1, + 9: 2, + 20: 1, + 11: 2, + 3: 3, + 23: 0, + 15: 1, + 2: 3, + 26: 0, + 12: 1, + 10: 3, + 14: 1, + 6: 3, + 4: 3, + 13: 1, + 7: 3, + 8: 2, + 5: 2, + 19: 3, + 29: 1, + 25: 1, + 22: 0, + 21: 1, + 27: 0, + 24: 0, + 17: 3, + 52: 1, + 42: 1, + 16: 1, + 58: 0, + 62: 0}, + 11:{ + 63: 1, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 3, + 18: 2, + 9: 3, + 20: 2, + 11: 2, + 3: 3, + 23: 3, + 15: 2, + 2: 3, + 26: 0, + 
12: 3, + 10: 3, + 14: 3, + 6: 3, + 4: 3, + 13: 3, + 7: 3, + 8: 3, + 5: 1, + 19: 3, + 29: 1, + 25: 2, + 22: 2, + 21: 2, + 27: 1, + 24: 1, + 17: 3, + 52: 1, + 42: 1, + 16: 3, + 58: 0, + 62: 0}, + 3:{ + 63: 0, + 45: 1, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 2, + 18: 3, + 9: 3, + 20: 3, + 11: 3, + 3: 2, + 23: 3, + 15: 3, + 2: 2, + 26: 3, + 12: 3, + 10: 3, + 14: 3, + 6: 3, + 4: 3, + 13: 3, + 7: 3, + 8: 3, + 5: 3, + 19: 2, + 29: 3, + 25: 3, + 22: 3, + 21: 3, + 27: 3, + 24: 3, + 17: 1, + 52: 0, + 42: 1, + 16: 3, + 58: 0, + 62: 0}, + 23:{ + 63: 0, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 3, + 18: 3, + 9: 2, + 20: 1, + 11: 3, + 3: 3, + 23: 0, + 15: 0, + 2: 3, + 26: 0, + 12: 2, + 10: 1, + 14: 1, + 6: 3, + 4: 2, + 13: 1, + 7: 1, + 8: 1, + 5: 1, + 19: 2, + 29: 0, + 25: 0, + 22: 1, + 21: 1, + 27: 0, + 24: 0, + 17: 2, + 52: 0, + 42: 0, + 16: 1, + 58: 0, + 62: 0}, + 15:{ + 63: 1, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 3, + 18: 3, + 9: 3, + 20: 3, + 11: 3, + 3: 3, + 23: 1, + 15: 1, + 2: 3, + 26: 0, + 12: 3, + 10: 3, + 14: 3, + 6: 3, + 4: 3, + 13: 3, + 7: 3, + 8: 3, + 5: 3, + 19: 3, + 29: 1, + 25: 2, + 22: 2, + 21: 2, + 27: 2, + 24: 1, + 17: 2, + 52: 1, + 42: 1, + 16: 2, + 58: 0, + 62: 0}, + 2:{ + 63: 1, + 45: 1, + 31: 0, + 32: 0, + 35: 0, + 43: 1, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 
0, + 33: 1, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 1, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 1, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 3, + 18: 3, + 9: 3, + 20: 3, + 11: 3, + 3: 3, + 23: 3, + 15: 3, + 2: 3, + 26: 3, + 12: 3, + 10: 3, + 14: 3, + 6: 3, + 4: 3, + 13: 3, + 7: 3, + 8: 3, + 5: 3, + 19: 2, + 29: 3, + 25: 3, + 22: 3, + 21: 3, + 27: 3, + 24: 3, + 17: 2, + 52: 0, + 42: 1, + 16: 3, + 58: 0, + 62: 0}, + 26:{ + 63: 0, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 1, + 18: 2, + 9: 2, + 20: 1, + 11: 2, + 3: 2, + 23: 0, + 15: 2, + 2: 1, + 26: 0, + 12: 3, + 10: 2, + 14: 2, + 6: 3, + 4: 2, + 13: 1, + 7: 2, + 8: 3, + 5: 3, + 19: 1, + 29: 2, + 25: 1, + 22: 2, + 21: 2, + 27: 1, + 24: 1, + 17: 1, + 52: 0, + 42: 0, + 16: 1, + 58: 0, + 62: 0}, + 12:{ + 63: 1, + 45: 0, + 31: 0, + 32: 0, + 35: 1, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 1, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 3, + 18: 1, + 9: 3, + 20: 2, + 11: 1, + 3: 3, + 23: 0, + 15: 2, + 2: 3, + 26: 0, + 12: 1, + 10: 3, + 14: 2, + 6: 3, + 4: 3, + 13: 1, + 7: 3, + 8: 3, + 5: 3, + 19: 3, + 29: 1, + 25: 1, + 22: 3, + 21: 2, + 27: 1, + 24: 0, + 17: 3, + 52: 1, + 42: 2, + 16: 1, + 58: 0, + 62: 0}, + 10:{ + 63: 1, + 45: 1, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 1, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 3, + 18: 3, + 9: 3, + 20: 3, + 11: 2, + 3: 3, + 23: 3, + 15: 2, + 2: 3, + 26: 0, + 12: 3, + 10: 1, + 14: 2, + 6: 3, + 4: 
3, + 13: 2, + 7: 2, + 8: 3, + 5: 3, + 19: 3, + 29: 2, + 25: 2, + 22: 2, + 21: 2, + 27: 2, + 24: 1, + 17: 3, + 52: 2, + 42: 3, + 16: 3, + 58: 0, + 62: 0}, + 14:{ + 63: 1, + 45: 0, + 31: 1, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 3, + 18: 3, + 9: 3, + 20: 1, + 11: 1, + 3: 3, + 23: 1, + 15: 1, + 2: 3, + 26: 0, + 12: 2, + 10: 3, + 14: 1, + 6: 3, + 4: 3, + 13: 3, + 7: 2, + 8: 2, + 5: 1, + 19: 3, + 29: 2, + 25: 1, + 22: 2, + 21: 2, + 27: 2, + 24: 1, + 17: 3, + 52: 1, + 42: 2, + 16: 3, + 58: 0, + 62: 0}, + 6:{ + 63: 1, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 1, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 3, + 18: 2, + 9: 2, + 20: 3, + 11: 3, + 3: 3, + 23: 2, + 15: 2, + 2: 3, + 26: 0, + 12: 3, + 10: 2, + 14: 1, + 6: 3, + 4: 3, + 13: 1, + 7: 2, + 8: 3, + 5: 3, + 19: 3, + 29: 3, + 25: 2, + 22: 3, + 21: 3, + 27: 2, + 24: 1, + 17: 3, + 52: 2, + 42: 2, + 16: 3, + 58: 0, + 62: 0}, + 4:{ + 63: 0, + 45: 1, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 2, + 18: 3, + 9: 3, + 20: 3, + 11: 3, + 3: 3, + 23: 3, + 15: 3, + 2: 3, + 26: 3, + 12: 3, + 10: 3, + 14: 3, + 6: 3, + 4: 2, + 13: 3, + 7: 3, + 8: 3, + 5: 3, + 19: 2, + 29: 3, + 25: 3, + 22: 3, + 21: 3, + 27: 3, + 24: 3, + 17: 1, + 52: 0, + 42: 1, + 16: 3, + 58: 0, + 62: 0}, + 13:{ + 63: 1, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 
0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 3, + 18: 1, + 9: 2, + 20: 1, + 11: 1, + 3: 3, + 23: 0, + 15: 1, + 2: 3, + 26: 1, + 12: 2, + 10: 3, + 14: 1, + 6: 2, + 4: 3, + 13: 1, + 7: 3, + 8: 2, + 5: 2, + 19: 3, + 29: 1, + 25: 1, + 22: 2, + 21: 2, + 27: 1, + 24: 1, + 17: 3, + 52: 1, + 42: 2, + 16: 2, + 58: 0, + 62: 0}, + 7:{ + 63: 1, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 3, + 18: 3, + 9: 3, + 20: 3, + 11: 3, + 3: 3, + 23: 3, + 15: 2, + 2: 3, + 26: 0, + 12: 3, + 10: 3, + 14: 3, + 6: 3, + 4: 3, + 13: 2, + 7: 1, + 8: 3, + 5: 3, + 19: 3, + 29: 2, + 25: 3, + 22: 3, + 21: 2, + 27: 3, + 24: 1, + 17: 3, + 52: 1, + 42: 2, + 16: 3, + 58: 0, + 62: 0}, + 8:{ + 63: 1, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 3, + 18: 2, + 9: 3, + 20: 2, + 11: 2, + 3: 3, + 23: 0, + 15: 1, + 2: 3, + 26: 0, + 12: 3, + 10: 3, + 14: 3, + 6: 3, + 4: 3, + 13: 3, + 7: 3, + 8: 1, + 5: 3, + 19: 3, + 29: 2, + 25: 2, + 22: 2, + 21: 2, + 27: 2, + 24: 0, + 17: 3, + 52: 2, + 42: 2, + 16: 3, + 58: 0, + 62: 0}, + 5:{ + 63: 1, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 3, + 18: 3, + 9: 3, + 20: 2, + 11: 2, + 3: 3, + 23: 1, + 15: 1, + 2: 3, + 26: 0, + 12: 3, + 10: 3, + 14: 2, + 6: 3, + 4: 3, + 13: 2, + 7: 3, + 8: 3, + 5: 3, + 
19: 3, + 29: 1, + 25: 2, + 22: 2, + 21: 2, + 27: 1, + 24: 1, + 17: 3, + 52: 2, + 42: 2, + 16: 3, + 58: 0, + 62: 0}, + 19:{ + 63: 0, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 3, + 18: 3, + 9: 3, + 20: 3, + 11: 3, + 3: 2, + 23: 3, + 15: 3, + 2: 2, + 26: 2, + 12: 3, + 10: 3, + 14: 3, + 6: 3, + 4: 2, + 13: 3, + 7: 3, + 8: 3, + 5: 3, + 19: 1, + 29: 2, + 25: 2, + 22: 2, + 21: 3, + 27: 3, + 24: 2, + 17: 1, + 52: 0, + 42: 1, + 16: 1, + 58: 0, + 62: 0}, + 29:{ + 63: 1, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 3, + 18: 1, + 9: 1, + 20: 1, + 11: 0, + 3: 3, + 23: 0, + 15: 0, + 2: 3, + 26: 0, + 12: 2, + 10: 2, + 14: 1, + 6: 1, + 4: 3, + 13: 0, + 7: 2, + 8: 2, + 5: 2, + 19: 2, + 29: 0, + 25: 1, + 22: 0, + 21: 1, + 27: 1, + 24: 0, + 17: 2, + 52: 2, + 42: 1, + 16: 1, + 58: 0, + 62: 0}, + 25:{ + 63: 0, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 3, + 18: 1, + 9: 3, + 20: 0, + 11: 1, + 3: 2, + 23: 0, + 15: 1, + 2: 3, + 26: 0, + 12: 1, + 10: 2, + 14: 2, + 6: 3, + 4: 3, + 13: 1, + 7: 3, + 8: 1, + 5: 2, + 19: 3, + 29: 0, + 25: 1, + 22: 0, + 21: 1, + 27: 0, + 24: 0, + 17: 2, + 52: 0, + 42: 1, + 16: 1, + 58: 0, + 62: 0}, + 22:{ + 63: 1, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 
0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 3, + 18: 1, + 9: 2, + 20: 1, + 11: 1, + 3: 3, + 23: 0, + 15: 1, + 2: 3, + 26: 0, + 12: 2, + 10: 1, + 14: 1, + 6: 1, + 4: 2, + 13: 1, + 7: 1, + 8: 1, + 5: 1, + 19: 2, + 29: 1, + 25: 1, + 22: 1, + 21: 1, + 27: 1, + 24: 1, + 17: 2, + 52: 1, + 42: 0, + 16: 2, + 58: 0, + 62: 0}, + 21:{ + 63: 1, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 3, + 18: 1, + 9: 3, + 20: 1, + 11: 0, + 3: 3, + 23: 1, + 15: 0, + 2: 3, + 26: 0, + 12: 3, + 10: 2, + 14: 2, + 6: 3, + 4: 3, + 13: 0, + 7: 2, + 8: 0, + 5: 2, + 19: 3, + 29: 0, + 25: 0, + 22: 0, + 21: 0, + 27: 1, + 24: 0, + 17: 2, + 52: 0, + 42: 1, + 16: 0, + 58: 0, + 62: 0}, + 27:{ + 63: 1, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 3, + 18: 0, + 9: 2, + 20: 0, + 11: 1, + 3: 3, + 23: 0, + 15: 0, + 2: 3, + 26: 0, + 12: 3, + 10: 2, + 14: 1, + 6: 3, + 4: 2, + 13: 2, + 7: 1, + 8: 0, + 5: 1, + 19: 2, + 29: 1, + 25: 0, + 22: 0, + 21: 1, + 27: 0, + 24: 0, + 17: 2, + 52: 1, + 42: 1, + 16: 0, + 58: 0, + 62: 0}, + 24:{ + 63: 1, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 3, + 18: 0, + 9: 1, + 20: 0, + 11: 0, + 3: 3, + 23: 0, + 15: 0, + 2: 3, + 26: 0, + 12: 1, + 10: 0, + 14: 0, + 6: 2, + 4: 3, + 13: 0, + 7: 1, + 8: 0, + 5: 2, + 19: 3, + 29: 0, + 25: 0, + 22: 1, + 
21: 0, + 27: 0, + 24: 0, + 17: 1, + 52: 0, + 42: 0, + 16: 2, + 58: 0, + 62: 0}, + 17:{ + 63: 0, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 1, + 18: 3, + 9: 3, + 20: 3, + 11: 3, + 3: 2, + 23: 3, + 15: 3, + 2: 1, + 26: 2, + 12: 3, + 10: 3, + 14: 3, + 6: 3, + 4: 3, + 13: 3, + 7: 3, + 8: 3, + 5: 3, + 19: 1, + 29: 1, + 25: 2, + 22: 2, + 21: 3, + 27: 2, + 24: 3, + 17: 0, + 52: 0, + 42: 2, + 16: 0, + 58: 0, + 62: 0}, + 52:{ + 63: 0, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 0, + 18: 0, + 9: 0, + 20: 0, + 11: 0, + 3: 1, + 23: 0, + 15: 0, + 2: 0, + 26: 0, + 12: 1, + 10: 0, + 14: 0, + 6: 1, + 4: 3, + 13: 0, + 7: 0, + 8: 0, + 5: 1, + 19: 0, + 29: 0, + 25: 0, + 22: 1, + 21: 0, + 27: 0, + 24: 0, + 17: 0, + 52: 0, + 42: 1, + 16: 0, + 58: 0, + 62: 0}, + 42:{ + 63: 0, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 1, + 18: 2, + 9: 1, + 20: 2, + 11: 2, + 3: 1, + 23: 2, + 15: 2, + 2: 1, + 26: 1, + 12: 2, + 10: 2, + 14: 2, + 6: 2, + 4: 1, + 13: 1, + 7: 2, + 8: 2, + 5: 2, + 19: 1, + 29: 1, + 25: 1, + 22: 2, + 21: 3, + 27: 1, + 24: 1, + 17: 1, + 52: 0, + 42: 0, + 16: 1, + 58: 0, + 62: 0}, + 16:{ + 63: 0, + 45: 1, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 
0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 0, + 18: 3, + 9: 3, + 20: 2, + 11: 3, + 3: 2, + 23: 1, + 15: 2, + 2: 1, + 26: 2, + 12: 3, + 10: 3, + 14: 3, + 6: 3, + 4: 1, + 13: 2, + 7: 2, + 8: 3, + 5: 3, + 19: 1, + 29: 1, + 25: 3, + 22: 2, + 21: 1, + 27: 1, + 24: 2, + 17: 0, + 52: 0, + 42: 0, + 16: 1, + 58: 0, + 62: 0}, + 58:{ + 63: 0, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 0, + 18: 0, + 9: 0, + 20: 0, + 11: 0, + 3: 0, + 23: 0, + 15: 0, + 2: 0, + 26: 0, + 12: 0, + 10: 0, + 14: 0, + 6: 0, + 4: 0, + 13: 0, + 7: 0, + 8: 0, + 5: 0, + 19: 0, + 29: 0, + 25: 0, + 22: 0, + 21: 0, + 27: 0, + 24: 0, + 17: 0, + 52: 0, + 42: 0, + 16: 0, + 58: 0, + 62: 0}, + 62:{ + 63: 0, + 45: 0, + 31: 0, + 32: 0, + 35: 0, + 43: 0, + 37: 0, + 44: 0, + 55: 0, + 47: 0, + 40: 0, + 59: 0, + 33: 0, + 46: 0, + 38: 0, + 36: 0, + 41: 0, + 30: 0, + 39: 0, + 28: 0, + 34: 0, + 51: 0, + 48: 0, + 49: 0, + 53: 0, + 50: 0, + 54: 0, + 57: 0, + 61: 0, + 60: 0, + 56: 0, + 1: 0, + 18: 0, + 9: 0, + 20: 0, + 11: 0, + 3: 0, + 23: 0, + 15: 0, + 2: 0, + 26: 0, + 12: 0, + 10: 0, + 14: 0, + 6: 0, + 4: 0, + 13: 0, + 7: 0, + 8: 0, + 5: 0, + 19: 0, + 29: 0, + 25: 0, + 22: 0, + 21: 0, + 27: 0, + 24: 0, + 17: 0, + 52: 0, + 42: 0, + 16: 0, + 58: 0, + 62: 0}} +ISO_8859_5_BULGARIAN_CHAR_TO_ORDER = { + 0: 255, + 1: 255, + 2: 255, + 3: 255, + 4: 255, + 5: 255, + 6: 255, + 7: 255, + 8: 255, + 9: 255, + 10: 254, + 11: 255, + 12: 255, + 13: 254, + 14: 255, + 15: 255, + 16: 255, + 17: 255, + 18: 255, + 19: 255, + 20: 255, + 21: 255, + 22: 255, + 23: 255, + 24: 255, + 25: 255, + 26: 255, + 27: 255, + 28: 255, + 29: 255, + 30: 255, + 31: 255, + 32: 253, + 33: 253, + 34: 253, + 35: 253, + 36: 253, + 37: 253, + 38: 253, + 39: 253, + 40: 253, + 41: 253, + 42: 253, + 43: 253, + 44: 
253, + 45: 253, + 46: 253, + 47: 253, + 48: 252, + 49: 252, + 50: 252, + 51: 252, + 52: 252, + 53: 252, + 54: 252, + 55: 252, + 56: 252, + 57: 252, + 58: 253, + 59: 253, + 60: 253, + 61: 253, + 62: 253, + 63: 253, + 64: 253, + 65: 77, + 66: 90, + 67: 99, + 68: 100, + 69: 72, + 70: 109, + 71: 107, + 72: 101, + 73: 79, + 74: 185, + 75: 81, + 76: 102, + 77: 76, + 78: 94, + 79: 82, + 80: 110, + 81: 186, + 82: 108, + 83: 91, + 84: 74, + 85: 119, + 86: 84, + 87: 96, + 88: 111, + 89: 187, + 90: 115, + 91: 253, + 92: 253, + 93: 253, + 94: 253, + 95: 253, + 96: 253, + 97: 65, + 98: 69, + 99: 70, + 100: 66, + 101: 63, + 102: 68, + 103: 112, + 104: 103, + 105: 92, + 106: 194, + 107: 104, + 108: 95, + 109: 86, + 110: 87, + 111: 71, + 112: 116, + 113: 195, + 114: 85, + 115: 93, + 116: 97, + 117: 113, + 118: 196, + 119: 197, + 120: 198, + 121: 199, + 122: 200, + 123: 253, + 124: 253, + 125: 253, + 126: 253, + 127: 253, + 128: 194, + 129: 195, + 130: 196, + 131: 197, + 132: 198, + 133: 199, + 134: 200, + 135: 201, + 136: 202, + 137: 203, + 138: 204, + 139: 205, + 140: 206, + 141: 207, + 142: 208, + 143: 209, + 144: 210, + 145: 211, + 146: 212, + 147: 213, + 148: 214, + 149: 215, + 150: 216, + 151: 217, + 152: 218, + 153: 219, + 154: 220, + 155: 221, + 156: 222, + 157: 223, + 158: 224, + 159: 225, + 160: 81, + 161: 226, + 162: 227, + 163: 228, + 164: 229, + 165: 230, + 166: 105, + 167: 231, + 168: 232, + 169: 233, + 170: 234, + 171: 235, + 172: 236, + 173: 45, + 174: 237, + 175: 238, + 176: 31, + 177: 32, + 178: 35, + 179: 43, + 180: 37, + 181: 44, + 182: 55, + 183: 47, + 184: 40, + 185: 59, + 186: 33, + 187: 46, + 188: 38, + 189: 36, + 190: 41, + 191: 30, + 192: 39, + 193: 28, + 194: 34, + 195: 51, + 196: 48, + 197: 49, + 198: 53, + 199: 50, + 200: 54, + 201: 57, + 202: 61, + 203: 239, + 204: 67, + 205: 240, + 206: 60, + 207: 56, + 208: 1, + 209: 18, + 210: 9, + 211: 20, + 212: 11, + 213: 3, + 214: 23, + 215: 15, + 216: 2, + 217: 26, + 218: 12, + 219: 10, + 220: 14, + 221: 6, + 
222: 4, + 223: 13, + 224: 7, + 225: 8, + 226: 5, + 227: 19, + 228: 29, + 229: 25, + 230: 22, + 231: 21, + 232: 27, + 233: 24, + 234: 17, + 235: 75, + 236: 52, + 237: 241, + 238: 42, + 239: 16, + 240: 62, + 241: 242, + 242: 243, + 243: 244, + 244: 58, + 245: 245, + 246: 98, + 247: 246, + 248: 247, + 249: 248, + 250: 249, + 251: 250, + 252: 251, + 253: 91, + 254: 252, + 255: 253} +ISO_8859_5_BULGARIAN_MODEL = SingleByteCharSetModel(charset_name="ISO-8859-5", language="Bulgarian", + char_to_order_map=ISO_8859_5_BULGARIAN_CHAR_TO_ORDER, + language_model=BULGARIAN_LANG_MODEL, + typical_positive_ratio=0.969392, + keep_ascii_letters=False, + alphabet="АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЬЮЯабвгдежзийклмнопрстуфхцчшщъьюя") +WINDOWS_1251_BULGARIAN_CHAR_TO_ORDER = { + 0: 255, + 1: 255, + 2: 255, + 3: 255, + 4: 255, + 5: 255, + 6: 255, + 7: 255, + 8: 255, + 9: 255, + 10: 254, + 11: 255, + 12: 255, + 13: 254, + 14: 255, + 15: 255, + 16: 255, + 17: 255, + 18: 255, + 19: 255, + 20: 255, + 21: 255, + 22: 255, + 23: 255, + 24: 255, + 25: 255, + 26: 255, + 27: 255, + 28: 255, + 29: 255, + 30: 255, + 31: 255, + 32: 253, + 33: 253, + 34: 253, + 35: 253, + 36: 253, + 37: 253, + 38: 253, + 39: 253, + 40: 253, + 41: 253, + 42: 253, + 43: 253, + 44: 253, + 45: 253, + 46: 253, + 47: 253, + 48: 252, + 49: 252, + 50: 252, + 51: 252, + 52: 252, + 53: 252, + 54: 252, + 55: 252, + 56: 252, + 57: 252, + 58: 253, + 59: 253, + 60: 253, + 61: 253, + 62: 253, + 63: 253, + 64: 253, + 65: 77, + 66: 90, + 67: 99, + 68: 100, + 69: 72, + 70: 109, + 71: 107, + 72: 101, + 73: 79, + 74: 185, + 75: 81, + 76: 102, + 77: 76, + 78: 94, + 79: 82, + 80: 110, + 81: 186, + 82: 108, + 83: 91, + 84: 74, + 85: 119, + 86: 84, + 87: 96, + 88: 111, + 89: 187, + 90: 115, + 91: 253, + 92: 253, + 93: 253, + 94: 253, + 95: 253, + 96: 253, + 97: 65, + 98: 69, + 99: 70, + 100: 66, + 101: 63, + 102: 68, + 103: 112, + 104: 103, + 105: 92, + 106: 194, + 107: 104, + 108: 95, + 109: 86, + 110: 87, + 111: 71, + 112: 116, + 113: 195, + 114: 
85, + 115: 93, + 116: 97, + 117: 113, + 118: 196, + 119: 197, + 120: 198, + 121: 199, + 122: 200, + 123: 253, + 124: 253, + 125: 253, + 126: 253, + 127: 253, + 128: 206, + 129: 207, + 130: 208, + 131: 209, + 132: 210, + 133: 211, + 134: 212, + 135: 213, + 136: 120, + 137: 214, + 138: 215, + 139: 216, + 140: 217, + 141: 218, + 142: 219, + 143: 220, + 144: 221, + 145: 78, + 146: 64, + 147: 83, + 148: 121, + 149: 98, + 150: 117, + 151: 105, + 152: 222, + 153: 223, + 154: 224, + 155: 225, + 156: 226, + 157: 227, + 158: 228, + 159: 229, + 160: 88, + 161: 230, + 162: 231, + 163: 232, + 164: 233, + 165: 122, + 166: 89, + 167: 106, + 168: 234, + 169: 235, + 170: 236, + 171: 237, + 172: 238, + 173: 45, + 174: 239, + 175: 240, + 176: 73, + 177: 80, + 178: 118, + 179: 114, + 180: 241, + 181: 242, + 182: 243, + 183: 244, + 184: 245, + 185: 62, + 186: 58, + 187: 246, + 188: 247, + 189: 248, + 190: 249, + 191: 250, + 192: 31, + 193: 32, + 194: 35, + 195: 43, + 196: 37, + 197: 44, + 198: 55, + 199: 47, + 200: 40, + 201: 59, + 202: 33, + 203: 46, + 204: 38, + 205: 36, + 206: 41, + 207: 30, + 208: 39, + 209: 28, + 210: 34, + 211: 51, + 212: 48, + 213: 49, + 214: 53, + 215: 50, + 216: 54, + 217: 57, + 218: 61, + 219: 251, + 220: 67, + 221: 252, + 222: 60, + 223: 56, + 224: 1, + 225: 18, + 226: 9, + 227: 20, + 228: 11, + 229: 3, + 230: 23, + 231: 15, + 232: 2, + 233: 26, + 234: 12, + 235: 10, + 236: 14, + 237: 6, + 238: 4, + 239: 13, + 240: 7, + 241: 8, + 242: 5, + 243: 19, + 244: 29, + 245: 25, + 246: 22, + 247: 21, + 248: 27, + 249: 24, + 250: 17, + 251: 75, + 252: 52, + 253: 253, + 254: 42, + 255: 16} +WINDOWS_1251_BULGARIAN_MODEL = SingleByteCharSetModel(charset_name="windows-1251", language="Bulgarian", + char_to_order_map=WINDOWS_1251_BULGARIAN_CHAR_TO_ORDER, + language_model=BULGARIAN_LANG_MODEL, + typical_positive_ratio=0.969392, + keep_ascii_letters=False, + alphabet="АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЬЮЯабвгдежзийклмнопрстуфхцчшщъьюя") diff --git 
a/APPS_UNCOMPILED/lib/chardet/langgreekmodel.py b/APPS_UNCOMPILED/lib/chardet/langgreekmodel.py new file mode 100644 index 0000000..0dabe94 --- /dev/null +++ b/APPS_UNCOMPILED/lib/chardet/langgreekmodel.py @@ -0,0 +1,4316 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/langgreekmodel.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 99559 bytes +from chardet.sbcharsetprober import SingleByteCharSetModel +GREEK_LANG_MODEL = {60:{ + 60: 2, + 55: 1, + 58: 2, + 36: 1, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 1, + 57: 0, + 17: 0, + 18: 0, + 22: 0, + 15: 0, + 1: 0, + 29: 0, + 20: 0, + 21: 0, + 3: 0, + 32: 0, + 13: 0, + 25: 0, + 5: 0, + 11: 0, + 16: 0, + 10: 0, + 6: 0, + 30: 0, + 4: 0, + 9: 0, + 8: 0, + 14: 0, + 7: 0, + 2: 0, + 12: 0, + 28: 0, + 23: 0, + 42: 0, + 24: 0, + 19: 0, + 26: 0, + 27: 0}, + 55:{ + 60: 0, + 55: 2, + 58: 2, + 36: 1, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 0, + 18: 0, + 22: 0, + 15: 0, + 1: 0, + 29: 0, + 20: 0, + 21: 0, + 3: 0, + 32: 0, + 13: 0, + 25: 0, + 5: 0, + 11: 0, + 16: 0, + 10: 0, + 6: 1, + 30: 0, + 4: 0, + 9: 0, + 8: 0, + 14: 0, + 7: 0, + 2: 0, + 12: 1, + 28: 0, + 23: 0, + 42: 0, + 24: 0, + 19: 0, + 26: 0, + 27: 0}, + 58:{ + 60: 2, + 55: 1, + 58: 1, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 
0, + 57: 0, + 17: 2, + 18: 0, + 22: 0, + 15: 0, + 1: 0, + 29: 0, + 20: 0, + 21: 0, + 3: 0, + 32: 0, + 13: 0, + 25: 0, + 5: 0, + 11: 0, + 16: 0, + 10: 0, + 6: 0, + 30: 0, + 4: 1, + 9: 0, + 8: 0, + 14: 0, + 7: 0, + 2: 0, + 12: 0, + 28: 0, + 23: 0, + 42: 0, + 24: 0, + 19: 0, + 26: 0, + 27: 0}, + 36:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 0, + 18: 0, + 22: 0, + 15: 0, + 1: 0, + 29: 0, + 20: 0, + 21: 0, + 3: 0, + 32: 0, + 13: 0, + 25: 0, + 5: 0, + 11: 0, + 16: 0, + 10: 0, + 6: 0, + 30: 0, + 4: 0, + 9: 0, + 8: 0, + 14: 0, + 7: 0, + 2: 0, + 12: 0, + 28: 0, + 23: 0, + 42: 0, + 24: 0, + 19: 0, + 26: 0, + 27: 0}, + 61:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 0, + 18: 0, + 22: 0, + 15: 0, + 1: 0, + 29: 0, + 20: 1, + 21: 2, + 3: 0, + 32: 0, + 13: 0, + 25: 0, + 5: 0, + 11: 0, + 16: 2, + 10: 0, + 6: 0, + 30: 0, + 4: 0, + 9: 1, + 8: 2, + 14: 0, + 7: 0, + 2: 0, + 12: 0, + 28: 0, + 23: 0, + 42: 0, + 24: 0, + 19: 0, + 26: 0, + 27: 0}, + 46:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 0, + 18: 0, + 22: 0, + 15: 0, + 1: 0, + 29: 2, + 20: 2, + 21: 0, + 3: 0, + 32: 0, + 13: 0, + 25: 0, + 5: 0, + 11: 2, + 16: 2, + 10: 0, + 6: 3, + 30: 2, + 4: 0, + 9: 2, + 8: 2, + 14: 0, + 7: 1, + 2: 2, + 12: 0, + 28: 2, + 23: 3, + 42: 0, + 24: 0, + 19: 0, + 26: 0, + 27: 0}, + 54:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 
54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 0, + 18: 0, + 22: 0, + 15: 0, + 1: 0, + 29: 0, + 20: 0, + 21: 0, + 3: 0, + 32: 0, + 13: 0, + 25: 0, + 5: 0, + 11: 0, + 16: 2, + 10: 2, + 6: 2, + 30: 0, + 4: 0, + 9: 2, + 8: 0, + 14: 0, + 7: 2, + 2: 3, + 12: 0, + 28: 0, + 23: 2, + 42: 0, + 24: 0, + 19: 0, + 26: 0, + 27: 0}, + 31:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 2, + 43: 2, + 41: 1, + 34: 0, + 40: 0, + 52: 2, + 47: 2, + 44: 2, + 53: 2, + 38: 2, + 49: 2, + 59: 1, + 39: 0, + 35: 2, + 48: 2, + 37: 2, + 33: 2, + 45: 2, + 56: 2, + 50: 0, + 57: 0, + 17: 0, + 18: 0, + 22: 0, + 15: 0, + 1: 0, + 29: 0, + 20: 2, + 21: 0, + 3: 0, + 32: 0, + 13: 0, + 25: 1, + 5: 0, + 11: 2, + 16: 3, + 10: 2, + 6: 3, + 30: 2, + 4: 0, + 9: 3, + 8: 3, + 14: 2, + 7: 2, + 2: 0, + 12: 3, + 28: 2, + 23: 0, + 42: 0, + 24: 0, + 19: 0, + 26: 2, + 27: 0}, + 51:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 2, + 51: 0, + 43: 0, + 41: 0, + 34: 1, + 40: 1, + 52: 0, + 47: 1, + 44: 0, + 53: 1, + 38: 0, + 49: 0, + 59: 0, + 39: 2, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 2, + 18: 2, + 22: 2, + 15: 0, + 1: 2, + 29: 0, + 20: 0, + 21: 0, + 3: 2, + 32: 0, + 13: 0, + 25: 0, + 5: 2, + 11: 0, + 16: 2, + 10: 0, + 6: 0, + 30: 0, + 4: 2, + 9: 0, + 8: 2, + 14: 0, + 7: 0, + 2: 0, + 12: 0, + 28: 0, + 23: 0, + 42: 0, + 24: 0, + 19: 0, + 26: 0, + 27: 0}, + 43:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 1, + 51: 0, + 43: 2, + 41: 0, + 34: 2, + 40: 1, + 52: 0, + 47: 2, + 44: 1, + 53: 1, + 38: 0, + 49: 0, + 59: 0, + 39: 1, + 35: 0, + 48: 2, + 37: 0, + 33: 0, + 45: 2, + 56: 0, + 50: 1, + 57: 2, + 17: 0, + 18: 0, + 22: 0, + 15: 2, + 1: 2, + 29: 0, + 20: 0, + 21: 0, + 3: 2, + 32: 0, + 13: 0, + 25: 0, + 5: 3, + 11: 0, + 16: 2, + 10: 0, + 6: 2, + 
30: 0, + 4: 0, + 9: 0, + 8: 2, + 14: 0, + 7: 0, + 2: 0, + 12: 0, + 28: 0, + 23: 0, + 42: 0, + 24: 0, + 19: 0, + 26: 0, + 27: 0}, + 41:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 2, + 40: 2, + 52: 0, + 47: 2, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 2, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 2, + 17: 0, + 18: 0, + 22: 2, + 15: 2, + 1: 0, + 29: 0, + 20: 0, + 21: 0, + 3: 3, + 32: 0, + 13: 2, + 25: 0, + 5: 3, + 11: 0, + 16: 0, + 10: 0, + 6: 0, + 30: 0, + 4: 2, + 9: 0, + 8: 2, + 14: 0, + 7: 0, + 2: 0, + 12: 2, + 28: 0, + 23: 0, + 42: 0, + 24: 2, + 19: 1, + 26: 2, + 27: 2}, + 34:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 2, + 51: 0, + 43: 2, + 41: 2, + 34: 0, + 40: 0, + 52: 0, + 47: 2, + 44: 2, + 53: 2, + 38: 2, + 49: 2, + 59: 1, + 39: 0, + 35: 2, + 48: 2, + 37: 2, + 33: 2, + 45: 2, + 56: 0, + 50: 2, + 57: 2, + 17: 3, + 18: 0, + 22: 0, + 15: 3, + 1: 0, + 29: 0, + 20: 3, + 21: 2, + 3: 1, + 32: 0, + 13: 0, + 25: 1, + 5: 2, + 11: 3, + 16: 3, + 10: 2, + 6: 3, + 30: 2, + 4: 0, + 9: 3, + 8: 2, + 14: 0, + 7: 2, + 2: 2, + 12: 2, + 28: 2, + 23: 0, + 42: 0, + 24: 0, + 19: 0, + 26: 1, + 27: 0}, + 40:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 1, + 41: 0, + 34: 0, + 40: 0, + 52: 2, + 47: 0, + 44: 2, + 53: 0, + 38: 2, + 49: 2, + 59: 0, + 39: 0, + 35: 2, + 48: 2, + 37: 2, + 33: 2, + 45: 1, + 56: 0, + 50: 0, + 57: 0, + 17: 0, + 18: 0, + 22: 0, + 15: 0, + 1: 0, + 29: 0, + 20: 0, + 21: 0, + 3: 0, + 32: 0, + 13: 0, + 25: 0, + 5: 0, + 11: 0, + 16: 2, + 10: 0, + 6: 1, + 30: 0, + 4: 0, + 9: 0, + 8: 0, + 14: 0, + 7: 0, + 2: 0, + 12: 0, + 28: 0, + 23: 1, + 42: 0, + 24: 0, + 19: 0, + 26: 0, + 27: 0}, + 52:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 2, + 51: 0, + 43: 0, + 41: 0, + 34: 2, + 40: 2, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 2, + 35: 0, + 48: 1, + 37: 0, + 
33: 0, + 45: 1, + 56: 0, + 50: 0, + 57: 0, + 17: 0, + 18: 2, + 22: 0, + 15: 0, + 1: 3, + 29: 0, + 20: 0, + 21: 0, + 3: 2, + 32: 0, + 13: 0, + 25: 0, + 5: 0, + 11: 0, + 16: 0, + 10: 0, + 6: 0, + 30: 0, + 4: 0, + 9: 0, + 8: 0, + 14: 0, + 7: 0, + 2: 0, + 12: 2, + 28: 0, + 23: 0, + 42: 0, + 24: 0, + 19: 0, + 26: 2, + 27: 0}, + 47:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 2, + 51: 1, + 43: 1, + 41: 2, + 34: 2, + 40: 2, + 52: 0, + 47: 0, + 44: 2, + 53: 2, + 38: 2, + 49: 2, + 59: 0, + 39: 2, + 35: 0, + 48: 2, + 37: 2, + 33: 2, + 45: 0, + 56: 2, + 50: 0, + 57: 2, + 17: 0, + 18: 0, + 22: 0, + 15: 0, + 1: 2, + 29: 0, + 20: 0, + 21: 2, + 3: 0, + 32: 0, + 13: 0, + 25: 0, + 5: 0, + 11: 0, + 16: 0, + 10: 0, + 6: 1, + 30: 0, + 4: 2, + 9: 0, + 8: 0, + 14: 0, + 7: 2, + 2: 1, + 12: 0, + 28: 0, + 23: 0, + 42: 0, + 24: 1, + 19: 0, + 26: 0, + 27: 0}, + 44:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 2, + 51: 0, + 43: 0, + 41: 1, + 34: 2, + 40: 2, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 1, + 49: 0, + 59: 0, + 39: 2, + 35: 0, + 48: 2, + 37: 0, + 33: 1, + 45: 2, + 56: 0, + 50: 0, + 57: 1, + 17: 3, + 18: 0, + 22: 0, + 15: 0, + 1: 3, + 29: 0, + 20: 0, + 21: 0, + 3: 2, + 32: 0, + 13: 0, + 25: 0, + 5: 2, + 11: 0, + 16: 2, + 10: 0, + 6: 0, + 30: 0, + 4: 2, + 9: 0, + 8: 2, + 14: 0, + 7: 0, + 2: 0, + 12: 2, + 28: 0, + 23: 0, + 42: 0, + 24: 0, + 19: 2, + 26: 2, + 27: 2}, + 53:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 2, + 51: 0, + 43: 0, + 41: 0, + 34: 2, + 40: 2, + 52: 0, + 47: 2, + 44: 0, + 53: 2, + 38: 0, + 49: 0, + 59: 0, + 39: 2, + 35: 0, + 48: 0, + 37: 2, + 33: 0, + 45: 2, + 56: 0, + 50: 0, + 57: 2, + 17: 2, + 18: 2, + 22: 0, + 15: 2, + 1: 2, + 29: 0, + 20: 0, + 21: 0, + 3: 2, + 32: 0, + 13: 0, + 25: 0, + 5: 1, + 11: 0, + 16: 0, + 10: 0, + 6: 0, + 30: 0, + 4: 2, + 9: 0, + 8: 0, + 14: 0, + 7: 0, + 2: 0, + 12: 2, + 28: 0, + 23: 0, + 42: 0, + 24: 0, + 19: 2, + 26: 2, + 27: 0}, + 38:{ + 60: 0, + 55: 0, + 58: 
0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 2, + 51: 2, + 43: 0, + 41: 0, + 34: 2, + 40: 2, + 52: 0, + 47: 2, + 44: 0, + 53: 0, + 38: 2, + 49: 0, + 59: 0, + 39: 2, + 35: 2, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 2, + 18: 2, + 22: 2, + 15: 2, + 1: 2, + 29: 0, + 20: 0, + 21: 0, + 3: 3, + 32: 0, + 13: 2, + 25: 0, + 5: 3, + 11: 0, + 16: 0, + 10: 0, + 6: 0, + 30: 0, + 4: 2, + 9: 3, + 8: 0, + 14: 0, + 7: 0, + 2: 0, + 12: 2, + 28: 0, + 23: 0, + 42: 0, + 24: 0, + 19: 2, + 26: 0, + 27: 0}, + 49:{ + 60: 2, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 2, + 51: 0, + 43: 0, + 41: 0, + 34: 2, + 40: 2, + 52: 0, + 47: 2, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 2, + 35: 0, + 48: 0, + 37: 0, + 33: 2, + 45: 0, + 56: 0, + 50: 0, + 57: 2, + 17: 0, + 18: 2, + 22: 0, + 15: 2, + 1: 2, + 29: 0, + 20: 0, + 21: 0, + 3: 1, + 32: 0, + 13: 0, + 25: 0, + 5: 0, + 11: 0, + 16: 0, + 10: 0, + 6: 0, + 30: 0, + 4: 2, + 9: 0, + 8: 0, + 14: 0, + 7: 0, + 2: 0, + 12: 0, + 28: 0, + 23: 0, + 42: 0, + 24: 1, + 19: 2, + 26: 0, + 27: 0}, + 59:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 1, + 40: 1, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 1, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 0, + 18: 2, + 22: 0, + 15: 0, + 1: 2, + 29: 0, + 20: 0, + 21: 0, + 3: 2, + 32: 0, + 13: 0, + 25: 0, + 5: 0, + 11: 0, + 16: 0, + 10: 0, + 6: 0, + 30: 0, + 4: 0, + 9: 0, + 8: 0, + 14: 0, + 7: 0, + 2: 0, + 12: 0, + 28: 0, + 23: 0, + 42: 0, + 24: 0, + 19: 0, + 26: 0, + 27: 0}, + 39:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 1, + 43: 2, + 41: 2, + 34: 2, + 40: 1, + 52: 2, + 47: 2, + 44: 2, + 53: 2, + 38: 2, + 49: 2, + 59: 0, + 39: 0, + 35: 2, + 48: 2, + 37: 2, + 33: 2, + 45: 2, + 56: 2, + 50: 2, + 57: 0, + 17: 0, + 18: 0, + 22: 0, + 15: 0, + 1: 0, + 29: 0, + 20: 0, + 21: 2, + 3: 0, + 32: 0, + 13: 0, + 25: 0, + 5: 3, + 
11: 2, + 16: 2, + 10: 2, + 6: 2, + 30: 0, + 4: 0, + 9: 2, + 8: 2, + 14: 0, + 7: 0, + 2: 2, + 12: 2, + 28: 1, + 23: 1, + 42: 0, + 24: 0, + 19: 0, + 26: 2, + 27: 0}, + 35:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 2, + 51: 0, + 43: 0, + 41: 0, + 34: 2, + 40: 0, + 52: 0, + 47: 2, + 44: 0, + 53: 2, + 38: 1, + 49: 0, + 59: 0, + 39: 2, + 35: 0, + 48: 2, + 37: 0, + 33: 1, + 45: 0, + 56: 0, + 50: 1, + 57: 2, + 17: 2, + 18: 1, + 22: 1, + 15: 2, + 1: 3, + 29: 0, + 20: 0, + 21: 0, + 3: 3, + 32: 0, + 13: 2, + 25: 0, + 5: 2, + 11: 0, + 16: 2, + 10: 0, + 6: 2, + 30: 0, + 4: 3, + 9: 0, + 8: 3, + 14: 0, + 7: 0, + 2: 0, + 12: 2, + 28: 0, + 23: 2, + 42: 0, + 24: 2, + 19: 2, + 26: 0, + 27: 3}, + 48:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 2, + 51: 0, + 43: 1, + 41: 1, + 34: 2, + 40: 2, + 52: 0, + 47: 2, + 44: 0, + 53: 0, + 38: 0, + 49: 2, + 59: 0, + 39: 2, + 35: 0, + 48: 2, + 37: 0, + 33: 1, + 45: 1, + 56: 0, + 50: 1, + 57: 1, + 17: 0, + 18: 0, + 22: 0, + 15: 2, + 1: 0, + 29: 0, + 20: 0, + 21: 0, + 3: 0, + 32: 0, + 13: 0, + 25: 0, + 5: 0, + 11: 0, + 16: 0, + 10: 0, + 6: 0, + 30: 0, + 4: 1, + 9: 0, + 8: 0, + 14: 0, + 7: 0, + 2: 0, + 12: 3, + 28: 0, + 23: 0, + 42: 0, + 24: 2, + 19: 0, + 26: 2, + 27: 0}, + 37:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 2, + 51: 0, + 43: 0, + 41: 1, + 34: 2, + 40: 2, + 52: 0, + 47: 2, + 44: 2, + 53: 0, + 38: 2, + 49: 0, + 59: 0, + 39: 2, + 35: 0, + 48: 0, + 37: 2, + 33: 2, + 45: 2, + 56: 0, + 50: 2, + 57: 2, + 17: 0, + 18: 0, + 22: 2, + 15: 2, + 1: 2, + 29: 2, + 20: 0, + 21: 0, + 3: 3, + 32: 0, + 13: 3, + 25: 0, + 5: 2, + 11: 2, + 16: 0, + 10: 0, + 6: 0, + 30: 0, + 4: 2, + 9: 2, + 8: 0, + 14: 0, + 7: 0, + 2: 3, + 12: 3, + 28: 0, + 23: 2, + 42: 0, + 24: 2, + 19: 0, + 26: 2, + 27: 2}, + 33:{ + 60: 0, + 55: 1, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 2, + 51: 0, + 43: 0, + 41: 0, + 34: 2, + 40: 2, + 52: 0, + 47: 2, + 44: 2, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 
39: 2, + 35: 0, + 48: 2, + 37: 0, + 33: 1, + 45: 1, + 56: 0, + 50: 0, + 57: 2, + 17: 2, + 18: 2, + 22: 0, + 15: 2, + 1: 3, + 29: 0, + 20: 0, + 21: 0, + 3: 2, + 32: 0, + 13: 2, + 25: 0, + 5: 2, + 11: 0, + 16: 0, + 10: 2, + 6: 0, + 30: 0, + 4: 3, + 9: 0, + 8: 2, + 14: 0, + 7: 2, + 2: 0, + 12: 2, + 28: 0, + 23: 0, + 42: 0, + 24: 0, + 19: 2, + 26: 2, + 27: 3}, + 45:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 2, + 41: 0, + 34: 1, + 40: 2, + 52: 2, + 47: 0, + 44: 0, + 53: 1, + 38: 2, + 49: 2, + 59: 0, + 39: 0, + 35: 2, + 48: 1, + 37: 2, + 33: 2, + 45: 0, + 56: 0, + 50: 1, + 57: 0, + 17: 0, + 18: 0, + 22: 0, + 15: 0, + 1: 0, + 29: 0, + 20: 0, + 21: 0, + 3: 0, + 32: 0, + 13: 0, + 25: 0, + 5: 0, + 11: 0, + 16: 2, + 10: 0, + 6: 0, + 30: 0, + 4: 0, + 9: 3, + 8: 0, + 14: 0, + 7: 0, + 2: 0, + 12: 0, + 28: 0, + 23: 0, + 42: 0, + 24: 0, + 19: 0, + 26: 0, + 27: 0}, + 56:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 1, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 1, + 52: 0, + 47: 2, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 2, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 0, + 18: 0, + 22: 0, + 15: 0, + 1: 2, + 29: 0, + 20: 0, + 21: 0, + 3: 2, + 32: 0, + 13: 0, + 25: 0, + 5: 2, + 11: 0, + 16: 0, + 10: 0, + 6: 0, + 30: 0, + 4: 2, + 9: 0, + 8: 0, + 14: 0, + 7: 0, + 2: 2, + 12: 2, + 28: 0, + 23: 0, + 42: 0, + 24: 0, + 19: 0, + 26: 1, + 27: 1}, + 50:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 1, + 51: 0, + 43: 0, + 41: 0, + 34: 2, + 40: 2, + 52: 0, + 47: 2, + 44: 0, + 53: 0, + 38: 0, + 49: 1, + 59: 0, + 39: 1, + 35: 0, + 48: 2, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 1, + 57: 1, + 17: 2, + 18: 0, + 22: 0, + 15: 0, + 1: 2, + 29: 0, + 20: 0, + 21: 0, + 3: 2, + 32: 0, + 13: 0, + 25: 0, + 5: 0, + 11: 0, + 16: 0, + 10: 0, + 6: 0, + 30: 0, + 4: 2, + 9: 0, + 8: 3, + 14: 0, + 7: 0, + 2: 2, + 12: 0, + 28: 0, + 23: 0, + 42: 0, + 24: 2, + 19: 0, + 26: 0, + 27: 
0}, + 57:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 1, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 1, + 38: 0, + 49: 2, + 59: 0, + 39: 0, + 35: 0, + 48: 2, + 37: 2, + 33: 2, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 0, + 18: 0, + 22: 0, + 15: 0, + 1: 0, + 29: 0, + 20: 0, + 21: 0, + 3: 0, + 32: 0, + 13: 0, + 25: 0, + 5: 0, + 11: 0, + 16: 0, + 10: 0, + 6: 0, + 30: 0, + 4: 0, + 9: 0, + 8: 2, + 14: 2, + 7: 2, + 2: 0, + 12: 0, + 28: 0, + 23: 1, + 42: 0, + 24: 0, + 19: 0, + 26: 0, + 27: 0}, + 17:{ + 60: 0, + 55: 0, + 58: 0, + 36: 2, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 0, + 18: 0, + 22: 0, + 15: 0, + 1: 0, + 29: 3, + 20: 3, + 21: 3, + 3: 3, + 32: 3, + 13: 0, + 25: 3, + 5: 2, + 11: 3, + 16: 3, + 10: 3, + 6: 3, + 30: 3, + 4: 0, + 9: 3, + 8: 3, + 14: 3, + 7: 3, + 2: 3, + 12: 0, + 28: 3, + 23: 3, + 42: 3, + 24: 2, + 19: 0, + 26: 0, + 27: 0}, + 18:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 0, + 18: 0, + 22: 0, + 15: 0, + 1: 3, + 29: 2, + 20: 3, + 21: 2, + 3: 3, + 32: 2, + 13: 0, + 25: 3, + 5: 0, + 11: 3, + 16: 3, + 10: 3, + 6: 3, + 30: 3, + 4: 3, + 9: 3, + 8: 3, + 14: 3, + 7: 3, + 2: 3, + 12: 0, + 28: 3, + 23: 3, + 42: 3, + 24: 2, + 19: 0, + 26: 0, + 27: 0}, + 22:{ + 60: 0, + 55: 0, + 58: 0, + 36: 1, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 0, + 18: 0, + 22: 0, + 15: 0, + 1: 0, + 29: 0, + 20: 3, + 21: 3, + 3: 0, + 
32: 0, + 13: 0, + 25: 3, + 5: 0, + 11: 3, + 16: 2, + 10: 3, + 6: 3, + 30: 2, + 4: 0, + 9: 3, + 8: 3, + 14: 3, + 7: 3, + 2: 3, + 12: 0, + 28: 2, + 23: 3, + 42: 2, + 24: 0, + 19: 0, + 26: 0, + 27: 0}, + 15:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 0, + 18: 0, + 22: 0, + 15: 0, + 1: 3, + 29: 2, + 20: 3, + 21: 3, + 3: 3, + 32: 3, + 13: 3, + 25: 3, + 5: 0, + 11: 3, + 16: 3, + 10: 3, + 6: 3, + 30: 3, + 4: 3, + 9: 3, + 8: 3, + 14: 3, + 7: 3, + 2: 3, + 12: 0, + 28: 1, + 23: 3, + 42: 2, + 24: 3, + 19: 0, + 26: 0, + 27: 0}, + 1:{ + 60: 0, + 55: 0, + 58: 0, + 36: 2, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 0, + 18: 2, + 22: 0, + 15: 3, + 1: 0, + 29: 3, + 20: 3, + 21: 3, + 3: 2, + 32: 3, + 13: 1, + 25: 3, + 5: 3, + 11: 3, + 16: 3, + 10: 3, + 6: 3, + 30: 3, + 4: 2, + 9: 3, + 8: 3, + 14: 3, + 7: 3, + 2: 3, + 12: 3, + 28: 3, + 23: 3, + 42: 2, + 24: 0, + 19: 2, + 26: 2, + 27: 0}, + 29:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 3, + 18: 2, + 22: 3, + 15: 2, + 1: 3, + 29: 0, + 20: 2, + 21: 2, + 3: 3, + 32: 0, + 13: 2, + 25: 0, + 5: 3, + 11: 0, + 16: 3, + 10: 0, + 6: 0, + 30: 0, + 4: 3, + 9: 0, + 8: 3, + 14: 0, + 7: 0, + 2: 0, + 12: 0, + 28: 0, + 23: 0, + 42: 0, + 24: 2, + 19: 2, + 26: 2, + 27: 2}, + 20:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 
0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 3, + 18: 3, + 22: 3, + 15: 3, + 1: 3, + 29: 0, + 20: 3, + 21: 0, + 3: 3, + 32: 0, + 13: 3, + 25: 0, + 5: 3, + 11: 3, + 16: 3, + 10: 3, + 6: 3, + 30: 3, + 4: 3, + 9: 0, + 8: 3, + 14: 0, + 7: 0, + 2: 0, + 12: 2, + 28: 0, + 23: 3, + 42: 0, + 24: 3, + 19: 3, + 26: 2, + 27: 3}, + 21:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 2, + 18: 3, + 22: 3, + 15: 3, + 1: 3, + 29: 0, + 20: 0, + 21: 0, + 3: 3, + 32: 0, + 13: 3, + 25: 0, + 5: 3, + 11: 0, + 16: 0, + 10: 0, + 6: 0, + 30: 0, + 4: 3, + 9: 0, + 8: 3, + 14: 0, + 7: 0, + 2: 0, + 12: 3, + 28: 0, + 23: 0, + 42: 0, + 24: 3, + 19: 3, + 26: 3, + 27: 3}, + 3:{ + 60: 0, + 55: 0, + 58: 0, + 36: 2, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 3, + 18: 0, + 22: 0, + 15: 3, + 1: 2, + 29: 3, + 20: 3, + 21: 3, + 3: 2, + 32: 2, + 13: 0, + 25: 3, + 5: 3, + 11: 3, + 16: 3, + 10: 3, + 6: 3, + 30: 3, + 4: 2, + 9: 3, + 8: 3, + 14: 3, + 7: 3, + 2: 3, + 12: 3, + 28: 3, + 23: 3, + 42: 2, + 24: 3, + 19: 2, + 26: 3, + 27: 2}, + 32:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 2, + 18: 2, + 22: 2, + 15: 2, + 1: 2, + 29: 0, + 20: 0, + 21: 0, + 3: 3, + 32: 0, + 13: 3, + 25: 0, + 5: 2, + 11: 0, + 16: 0, + 10: 0, + 6: 0, + 30: 0, + 4: 3, + 9: 0, + 8: 0, + 14: 0, + 7: 0, + 2: 0, + 12: 1, + 28: 0, + 23: 0, + 42: 0, + 
24: 3, + 19: 2, + 26: 0, + 27: 2}, + 13:{ + 60: 0, + 55: 0, + 58: 0, + 36: 2, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 0, + 18: 0, + 22: 0, + 15: 0, + 1: 0, + 29: 0, + 20: 3, + 21: 2, + 3: 0, + 32: 0, + 13: 0, + 25: 3, + 5: 0, + 11: 3, + 16: 3, + 10: 3, + 6: 3, + 30: 2, + 4: 0, + 9: 2, + 8: 3, + 14: 3, + 7: 3, + 2: 3, + 12: 0, + 28: 2, + 23: 3, + 42: 2, + 24: 0, + 19: 0, + 26: 0, + 27: 0}, + 25:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 2, + 18: 3, + 22: 3, + 15: 2, + 1: 3, + 29: 0, + 20: 0, + 21: 0, + 3: 3, + 32: 0, + 13: 3, + 25: 0, + 5: 3, + 11: 0, + 16: 1, + 10: 3, + 6: 2, + 30: 0, + 4: 3, + 9: 0, + 8: 3, + 14: 0, + 7: 0, + 2: 0, + 12: 3, + 28: 0, + 23: 0, + 42: 0, + 24: 3, + 19: 3, + 26: 3, + 27: 3}, + 5:{ + 60: 0, + 55: 1, + 58: 0, + 36: 2, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 1, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 3, + 18: 3, + 22: 3, + 15: 0, + 1: 3, + 29: 3, + 20: 3, + 21: 3, + 3: 3, + 32: 2, + 13: 3, + 25: 3, + 5: 0, + 11: 3, + 16: 3, + 10: 3, + 6: 3, + 30: 3, + 4: 3, + 9: 3, + 8: 3, + 14: 3, + 7: 3, + 2: 3, + 12: 0, + 28: 2, + 23: 3, + 42: 2, + 24: 3, + 19: 3, + 26: 0, + 27: 3}, + 11:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 3, + 18: 3, + 22: 3, + 15: 3, + 1: 3, + 29: 0, 
+ 20: 0, + 21: 3, + 3: 3, + 32: 0, + 13: 3, + 25: 2, + 5: 3, + 11: 3, + 16: 3, + 10: 3, + 6: 2, + 30: 0, + 4: 3, + 9: 2, + 8: 3, + 14: 0, + 7: 0, + 2: 3, + 12: 3, + 28: 2, + 23: 2, + 42: 0, + 24: 3, + 19: 3, + 26: 3, + 27: 3}, + 16:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 3, + 18: 3, + 22: 3, + 15: 3, + 1: 3, + 29: 1, + 20: 2, + 21: 1, + 3: 3, + 32: 0, + 13: 3, + 25: 2, + 5: 3, + 11: 2, + 16: 3, + 10: 2, + 6: 2, + 30: 0, + 4: 3, + 9: 3, + 8: 0, + 14: 0, + 7: 0, + 2: 3, + 12: 3, + 28: 2, + 23: 0, + 42: 0, + 24: 3, + 19: 3, + 26: 3, + 27: 3}, + 10:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 1, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 3, + 18: 3, + 22: 3, + 15: 3, + 1: 3, + 29: 3, + 20: 0, + 21: 0, + 3: 3, + 32: 0, + 13: 3, + 25: 0, + 5: 3, + 11: 0, + 16: 0, + 10: 3, + 6: 3, + 30: 0, + 4: 3, + 9: 3, + 8: 0, + 14: 0, + 7: 0, + 2: 0, + 12: 2, + 28: 3, + 23: 0, + 42: 2, + 24: 3, + 19: 3, + 26: 2, + 27: 2}, + 6:{ + 60: 0, + 55: 0, + 58: 0, + 36: 2, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 3, + 18: 3, + 22: 3, + 15: 3, + 1: 3, + 29: 0, + 20: 0, + 21: 3, + 3: 3, + 32: 2, + 13: 3, + 25: 3, + 5: 3, + 11: 0, + 16: 1, + 10: 0, + 6: 2, + 30: 0, + 4: 3, + 9: 0, + 8: 0, + 14: 0, + 7: 3, + 2: 3, + 12: 3, + 28: 0, + 23: 0, + 42: 0, + 24: 3, + 19: 3, + 26: 3, + 27: 3}, + 30:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 
0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 2, + 18: 3, + 22: 3, + 15: 2, + 1: 3, + 29: 0, + 20: 0, + 21: 0, + 3: 3, + 32: 0, + 13: 3, + 25: 0, + 5: 2, + 11: 0, + 16: 0, + 10: 0, + 6: 0, + 30: 0, + 4: 3, + 9: 0, + 8: 0, + 14: 0, + 7: 0, + 2: 3, + 12: 2, + 28: 0, + 23: 0, + 42: 0, + 24: 3, + 19: 2, + 26: 3, + 27: 1}, + 4:{ + 60: 0, + 55: 0, + 58: 0, + 36: 2, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 0, + 18: 2, + 22: 3, + 15: 3, + 1: 2, + 29: 3, + 20: 3, + 21: 3, + 3: 3, + 32: 0, + 13: 3, + 25: 3, + 5: 3, + 11: 3, + 16: 3, + 10: 3, + 6: 3, + 30: 2, + 4: 2, + 9: 3, + 8: 3, + 14: 3, + 7: 3, + 2: 3, + 12: 3, + 28: 3, + 23: 3, + 42: 2, + 24: 2, + 19: 1, + 26: 3, + 27: 2}, + 9:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 3, + 18: 3, + 22: 3, + 15: 3, + 1: 3, + 29: 0, + 20: 0, + 21: 0, + 3: 3, + 32: 0, + 13: 3, + 25: 0, + 5: 3, + 11: 0, + 16: 3, + 10: 0, + 6: 2, + 30: 0, + 4: 3, + 9: 0, + 8: 3, + 14: 2, + 7: 0, + 2: 3, + 12: 3, + 28: 0, + 23: 2, + 42: 0, + 24: 3, + 19: 3, + 26: 2, + 27: 3}, + 8:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 3, + 18: 3, + 22: 3, + 15: 3, + 1: 3, + 29: 2, + 20: 3, + 21: 2, + 3: 3, + 32: 0, + 13: 3, + 25: 3, + 5: 3, + 11: 3, + 16: 1, + 10: 3, + 6: 3, + 30: 2, + 4: 3, + 9: 2, + 8: 2, + 14: 0, + 7: 2, + 2: 3, + 12: 3, + 
28: 3, + 23: 3, + 42: 0, + 24: 3, + 19: 3, + 26: 3, + 27: 3}, + 14:{ + 60: 0, + 55: 0, + 58: 0, + 36: 2, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 0, + 18: 0, + 22: 0, + 15: 0, + 1: 0, + 29: 0, + 20: 0, + 21: 0, + 3: 0, + 32: 0, + 13: 0, + 25: 0, + 5: 0, + 11: 0, + 16: 0, + 10: 0, + 6: 0, + 30: 0, + 4: 0, + 9: 0, + 8: 0, + 14: 0, + 7: 0, + 2: 0, + 12: 0, + 28: 0, + 23: 0, + 42: 0, + 24: 0, + 19: 0, + 26: 0, + 27: 0}, + 7:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 2, + 18: 2, + 22: 3, + 15: 3, + 1: 3, + 29: 3, + 20: 0, + 21: 2, + 3: 3, + 32: 0, + 13: 3, + 25: 3, + 5: 3, + 11: 3, + 16: 2, + 10: 3, + 6: 0, + 30: 0, + 4: 3, + 9: 3, + 8: 0, + 14: 0, + 7: 3, + 2: 3, + 12: 3, + 28: 3, + 23: 3, + 42: 0, + 24: 3, + 19: 3, + 26: 3, + 27: 2}, + 2:{ + 60: 0, + 55: 2, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 3, + 18: 3, + 22: 3, + 15: 3, + 1: 3, + 29: 0, + 20: 0, + 21: 0, + 3: 3, + 32: 2, + 13: 3, + 25: 0, + 5: 3, + 11: 2, + 16: 2, + 10: 3, + 6: 0, + 30: 0, + 4: 3, + 9: 0, + 8: 3, + 14: 0, + 7: 3, + 2: 3, + 12: 3, + 28: 2, + 23: 0, + 42: 0, + 24: 3, + 19: 3, + 26: 3, + 27: 3}, + 12:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 2, + 18: 2, + 22: 3, 
+ 15: 2, + 1: 3, + 29: 2, + 20: 3, + 21: 2, + 3: 2, + 32: 2, + 13: 2, + 25: 3, + 5: 2, + 11: 3, + 16: 3, + 10: 3, + 6: 3, + 30: 3, + 4: 3, + 9: 3, + 8: 3, + 14: 3, + 7: 3, + 2: 3, + 12: 0, + 28: 2, + 23: 3, + 42: 2, + 24: 2, + 19: 2, + 26: 0, + 27: 2}, + 28:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 3, + 18: 3, + 22: 3, + 15: 3, + 1: 3, + 29: 0, + 20: 0, + 21: 0, + 3: 3, + 32: 0, + 13: 2, + 25: 2, + 5: 3, + 11: 0, + 16: 2, + 10: 0, + 6: 1, + 30: 0, + 4: 3, + 9: 0, + 8: 3, + 14: 0, + 7: 0, + 2: 3, + 12: 3, + 28: 1, + 23: 0, + 42: 0, + 24: 3, + 19: 3, + 26: 2, + 27: 2}, + 23:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 3, + 18: 2, + 22: 3, + 15: 3, + 1: 3, + 29: 0, + 20: 0, + 21: 0, + 3: 3, + 32: 0, + 13: 2, + 25: 2, + 5: 3, + 11: 0, + 16: 2, + 10: 2, + 6: 3, + 30: 0, + 4: 3, + 9: 0, + 8: 3, + 14: 0, + 7: 0, + 2: 3, + 12: 3, + 28: 0, + 23: 2, + 42: 0, + 24: 3, + 19: 3, + 26: 3, + 27: 3}, + 42:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 2, + 18: 2, + 22: 1, + 15: 2, + 1: 2, + 29: 0, + 20: 0, + 21: 0, + 3: 3, + 32: 0, + 13: 3, + 25: 0, + 5: 2, + 11: 0, + 16: 0, + 10: 0, + 6: 0, + 30: 0, + 4: 2, + 9: 0, + 8: 0, + 14: 0, + 7: 0, + 2: 2, + 12: 1, + 28: 0, + 23: 0, + 42: 0, + 24: 2, + 19: 0, + 26: 0, + 27: 0}, + 24:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 
0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 1, + 18: 0, + 22: 2, + 15: 0, + 1: 0, + 29: 2, + 20: 3, + 21: 2, + 3: 0, + 32: 0, + 13: 0, + 25: 3, + 5: 2, + 11: 0, + 16: 2, + 10: 3, + 6: 3, + 30: 0, + 4: 0, + 9: 3, + 8: 3, + 14: 3, + 7: 3, + 2: 3, + 12: 0, + 28: 2, + 23: 2, + 42: 0, + 24: 0, + 19: 0, + 26: 0, + 27: 0}, + 19:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 0, + 18: 0, + 22: 0, + 15: 0, + 1: 0, + 29: 3, + 20: 3, + 21: 3, + 3: 1, + 32: 2, + 13: 2, + 25: 2, + 5: 2, + 11: 3, + 16: 3, + 10: 3, + 6: 3, + 30: 1, + 4: 2, + 9: 3, + 8: 3, + 14: 3, + 7: 3, + 2: 3, + 12: 0, + 28: 2, + 23: 3, + 42: 2, + 24: 0, + 19: 0, + 26: 0, + 27: 0}, + 26:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 0, + 18: 0, + 22: 0, + 15: 0, + 1: 2, + 29: 2, + 20: 2, + 21: 1, + 3: 3, + 32: 0, + 13: 2, + 25: 3, + 5: 0, + 11: 3, + 16: 3, + 10: 3, + 6: 3, + 30: 2, + 4: 3, + 9: 3, + 8: 3, + 14: 3, + 7: 3, + 2: 3, + 12: 0, + 28: 2, + 23: 2, + 42: 2, + 24: 2, + 19: 0, + 26: 0, + 27: 0}, + 27:{ + 60: 0, + 55: 0, + 58: 0, + 36: 0, + 61: 0, + 46: 0, + 54: 0, + 31: 0, + 51: 0, + 43: 0, + 41: 0, + 34: 0, + 40: 0, + 52: 0, + 47: 0, + 44: 0, + 53: 0, + 38: 0, + 49: 0, + 59: 0, + 39: 0, + 35: 0, + 48: 0, + 37: 0, + 33: 0, + 45: 0, + 56: 0, + 50: 0, + 57: 0, + 17: 0, + 18: 0, + 22: 0, + 15: 0, + 1: 0, + 29: 1, + 20: 0, + 21: 3, + 3: 0, + 32: 0, + 13: 1, + 25: 2, + 5: 2, + 11: 0, + 16: 2, + 10: 3, + 6: 3, + 30: 1, + 4: 0, + 9: 2, + 8: 3, + 14: 3, 
+ 7: 3, + 2: 3, + 12: 0, + 28: 1, + 23: 1, + 42: 0, + 24: 0, + 19: 0, + 26: 0, + 27: 0}} +WINDOWS_1253_GREEK_CHAR_TO_ORDER = { + 0: 255, + 1: 255, + 2: 255, + 3: 255, + 4: 255, + 5: 255, + 6: 255, + 7: 255, + 8: 255, + 9: 255, + 10: 254, + 11: 255, + 12: 255, + 13: 254, + 14: 255, + 15: 255, + 16: 255, + 17: 255, + 18: 255, + 19: 255, + 20: 255, + 21: 255, + 22: 255, + 23: 255, + 24: 255, + 25: 255, + 26: 255, + 27: 255, + 28: 255, + 29: 255, + 30: 255, + 31: 255, + 32: 253, + 33: 253, + 34: 253, + 35: 253, + 36: 253, + 37: 253, + 38: 253, + 39: 253, + 40: 253, + 41: 253, + 42: 253, + 43: 253, + 44: 253, + 45: 253, + 46: 253, + 47: 253, + 48: 252, + 49: 252, + 50: 252, + 51: 252, + 52: 252, + 53: 252, + 54: 252, + 55: 252, + 56: 252, + 57: 252, + 58: 253, + 59: 253, + 60: 253, + 61: 253, + 62: 253, + 63: 253, + 64: 253, + 65: 82, + 66: 100, + 67: 104, + 68: 94, + 69: 98, + 70: 101, + 71: 116, + 72: 102, + 73: 111, + 74: 187, + 75: 117, + 76: 92, + 77: 88, + 78: 113, + 79: 85, + 80: 79, + 81: 118, + 82: 105, + 83: 83, + 84: 67, + 85: 114, + 86: 119, + 87: 95, + 88: 99, + 89: 109, + 90: 188, + 91: 253, + 92: 253, + 93: 253, + 94: 253, + 95: 253, + 96: 253, + 97: 72, + 98: 70, + 99: 80, + 100: 81, + 101: 60, + 102: 96, + 103: 93, + 104: 89, + 105: 68, + 106: 120, + 107: 97, + 108: 77, + 109: 86, + 110: 69, + 111: 55, + 112: 78, + 113: 115, + 114: 65, + 115: 66, + 116: 58, + 117: 76, + 118: 106, + 119: 103, + 120: 87, + 121: 107, + 122: 112, + 123: 253, + 124: 253, + 125: 253, + 126: 253, + 127: 253, + 128: 255, + 129: 255, + 130: 255, + 131: 255, + 132: 255, + 133: 255, + 134: 255, + 135: 255, + 136: 255, + 137: 255, + 138: 255, + 139: 255, + 140: 255, + 141: 255, + 142: 255, + 143: 255, + 144: 255, + 145: 255, + 146: 255, + 147: 255, + 148: 255, + 149: 255, + 150: 255, + 151: 255, + 152: 255, + 153: 255, + 154: 255, + 155: 255, + 156: 255, + 157: 255, + 158: 255, + 159: 255, + 160: 253, + 161: 233, + 162: 61, + 163: 253, + 164: 253, + 165: 253, + 166: 253, + 167: 
253, + 168: 253, + 169: 253, + 170: 253, + 171: 253, + 172: 253, + 173: 74, + 174: 253, + 175: 253, + 176: 253, + 177: 253, + 178: 253, + 179: 253, + 180: 247, + 181: 253, + 182: 253, + 183: 36, + 184: 46, + 185: 71, + 186: 73, + 187: 253, + 188: 54, + 189: 253, + 190: 108, + 191: 123, + 192: 110, + 193: 31, + 194: 51, + 195: 43, + 196: 41, + 197: 34, + 198: 91, + 199: 40, + 200: 52, + 201: 47, + 202: 44, + 203: 53, + 204: 38, + 205: 49, + 206: 59, + 207: 39, + 208: 35, + 209: 48, + 210: 250, + 211: 37, + 212: 33, + 213: 45, + 214: 56, + 215: 50, + 216: 84, + 217: 57, + 218: 120, + 219: 121, + 220: 17, + 221: 18, + 222: 22, + 223: 15, + 224: 124, + 225: 1, + 226: 29, + 227: 20, + 228: 21, + 229: 3, + 230: 32, + 231: 13, + 232: 25, + 233: 5, + 234: 11, + 235: 16, + 236: 10, + 237: 6, + 238: 30, + 239: 4, + 240: 9, + 241: 8, + 242: 14, + 243: 7, + 244: 2, + 245: 12, + 246: 28, + 247: 23, + 248: 42, + 249: 24, + 250: 64, + 251: 75, + 252: 19, + 253: 26, + 254: 27, + 255: 253} +WINDOWS_1253_GREEK_MODEL = SingleByteCharSetModel(charset_name="windows-1253", language="Greek", + char_to_order_map=WINDOWS_1253_GREEK_CHAR_TO_ORDER, + language_model=GREEK_LANG_MODEL, + typical_positive_ratio=0.982851, + keep_ascii_letters=False, + alphabet="ΆΈΉΊΌΎΏΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩάέήίαβγδεζηθικλμνξοπρςστυφχψωόύώ") +ISO_8859_7_GREEK_CHAR_TO_ORDER = { + 0: 255, + 1: 255, + 2: 255, + 3: 255, + 4: 255, + 5: 255, + 6: 255, + 7: 255, + 8: 255, + 9: 255, + 10: 254, + 11: 255, + 12: 255, + 13: 254, + 14: 255, + 15: 255, + 16: 255, + 17: 255, + 18: 255, + 19: 255, + 20: 255, + 21: 255, + 22: 255, + 23: 255, + 24: 255, + 25: 255, + 26: 255, + 27: 255, + 28: 255, + 29: 255, + 30: 255, + 31: 255, + 32: 253, + 33: 253, + 34: 253, + 35: 253, + 36: 253, + 37: 253, + 38: 253, + 39: 253, + 40: 253, + 41: 253, + 42: 253, + 43: 253, + 44: 253, + 45: 253, + 46: 253, + 47: 253, + 48: 252, + 49: 252, + 50: 252, + 51: 252, + 52: 252, + 53: 252, + 54: 252, + 55: 252, + 56: 252, + 57: 252, + 58: 253, + 59: 
253, + 60: 253, + 61: 253, + 62: 253, + 63: 253, + 64: 253, + 65: 82, + 66: 100, + 67: 104, + 68: 94, + 69: 98, + 70: 101, + 71: 116, + 72: 102, + 73: 111, + 74: 187, + 75: 117, + 76: 92, + 77: 88, + 78: 113, + 79: 85, + 80: 79, + 81: 118, + 82: 105, + 83: 83, + 84: 67, + 85: 114, + 86: 119, + 87: 95, + 88: 99, + 89: 109, + 90: 188, + 91: 253, + 92: 253, + 93: 253, + 94: 253, + 95: 253, + 96: 253, + 97: 72, + 98: 70, + 99: 80, + 100: 81, + 101: 60, + 102: 96, + 103: 93, + 104: 89, + 105: 68, + 106: 120, + 107: 97, + 108: 77, + 109: 86, + 110: 69, + 111: 55, + 112: 78, + 113: 115, + 114: 65, + 115: 66, + 116: 58, + 117: 76, + 118: 106, + 119: 103, + 120: 87, + 121: 107, + 122: 112, + 123: 253, + 124: 253, + 125: 253, + 126: 253, + 127: 253, + 128: 255, + 129: 255, + 130: 255, + 131: 255, + 132: 255, + 133: 255, + 134: 255, + 135: 255, + 136: 255, + 137: 255, + 138: 255, + 139: 255, + 140: 255, + 141: 255, + 142: 255, + 143: 255, + 144: 255, + 145: 255, + 146: 255, + 147: 255, + 148: 255, + 149: 255, + 150: 255, + 151: 255, + 152: 255, + 153: 255, + 154: 255, + 155: 255, + 156: 255, + 157: 255, + 158: 255, + 159: 255, + 160: 253, + 161: 233, + 162: 90, + 163: 253, + 164: 253, + 165: 253, + 166: 253, + 167: 253, + 168: 253, + 169: 253, + 170: 253, + 171: 253, + 172: 253, + 173: 74, + 174: 253, + 175: 253, + 176: 253, + 177: 253, + 178: 253, + 179: 253, + 180: 247, + 181: 248, + 182: 61, + 183: 36, + 184: 46, + 185: 71, + 186: 73, + 187: 253, + 188: 54, + 189: 253, + 190: 108, + 191: 123, + 192: 110, + 193: 31, + 194: 51, + 195: 43, + 196: 41, + 197: 34, + 198: 91, + 199: 40, + 200: 52, + 201: 47, + 202: 44, + 203: 53, + 204: 38, + 205: 49, + 206: 59, + 207: 39, + 208: 35, + 209: 48, + 210: 250, + 211: 37, + 212: 33, + 213: 45, + 214: 56, + 215: 50, + 216: 84, + 217: 57, + 218: 120, + 219: 121, + 220: 17, + 221: 18, + 222: 22, + 223: 15, + 224: 124, + 225: 1, + 226: 29, + 227: 20, + 228: 21, + 229: 3, + 230: 32, + 231: 13, + 232: 25, + 233: 5, + 234: 11, + 235: 16, + 
236: 10, + 237: 6, + 238: 30, + 239: 4, + 240: 9, + 241: 8, + 242: 14, + 243: 7, + 244: 2, + 245: 12, + 246: 28, + 247: 23, + 248: 42, + 249: 24, + 250: 64, + 251: 75, + 252: 19, + 253: 26, + 254: 27, + 255: 253} +ISO_8859_7_GREEK_MODEL = SingleByteCharSetModel(charset_name="ISO-8859-7", language="Greek", + char_to_order_map=ISO_8859_7_GREEK_CHAR_TO_ORDER, + language_model=GREEK_LANG_MODEL, + typical_positive_ratio=0.982851, + keep_ascii_letters=False, + alphabet="ΆΈΉΊΌΎΏΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩάέήίαβγδεζηθικλμνξοπρςστυφχψωόύώ") diff --git a/APPS_UNCOMPILED/lib/chardet/langhebrewmodel.py b/APPS_UNCOMPILED/lib/chardet/langhebrewmodel.py new file mode 100644 index 0000000..c10fd01 --- /dev/null +++ b/APPS_UNCOMPILED/lib/chardet/langhebrewmodel.py @@ -0,0 +1,4303 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/langhebrewmodel.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 98764 bytes +from chardet.sbcharsetprober import SingleByteCharSetModel +HEBREW_LANG_MODEL = {50:{ + 50: 0, + 60: 1, + 61: 1, + 42: 1, + 53: 1, + 56: 2, + 54: 2, + 49: 0, + 51: 2, + 43: 1, + 44: 2, + 63: 1, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 0, + 8: 0, + 20: 0, + 16: 0, + 3: 1, + 2: 0, + 24: 0, + 14: 0, + 22: 0, + 1: 0, + 25: 0, + 15: 0, + 4: 0, + 11: 0, + 6: 1, + 23: 0, + 12: 0, + 19: 0, + 13: 0, + 26: 0, + 18: 0, + 27: 0, + 21: 0, + 17: 1, + 7: 0, + 10: 1, + 5: 0, + 32: 0, + 52: 1, + 47: 0, + 46: 1, + 58: 0, + 40: 1}, + 60:{ + 50: 1, + 60: 1, + 61: 0, + 42: 1, + 53: 1, + 56: 1, + 54: 0, + 49: 1, + 51: 1, + 43: 1, + 44: 2, + 63: 1, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 
35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 1, + 8: 0, + 20: 0, + 16: 0, + 3: 1, + 2: 0, + 24: 0, + 14: 0, + 22: 0, + 1: 0, + 25: 0, + 15: 0, + 4: 0, + 11: 0, + 6: 1, + 23: 0, + 12: 1, + 19: 0, + 13: 0, + 26: 0, + 18: 0, + 27: 0, + 21: 0, + 17: 0, + 7: 0, + 10: 0, + 5: 0, + 32: 0, + 52: 0, + 47: 0, + 46: 1, + 58: 0, + 40: 1}, + 61:{ + 50: 1, + 60: 0, + 61: 1, + 42: 1, + 53: 1, + 56: 1, + 54: 1, + 49: 2, + 51: 1, + 43: 1, + 44: 0, + 63: 1, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 0, + 8: 0, + 20: 0, + 16: 0, + 3: 1, + 2: 0, + 24: 0, + 14: 0, + 22: 0, + 1: 0, + 25: 0, + 15: 0, + 4: 0, + 11: 0, + 6: 0, + 23: 0, + 12: 0, + 19: 0, + 13: 0, + 26: 0, + 18: 0, + 27: 0, + 21: 0, + 17: 0, + 7: 0, + 10: 0, + 5: 0, + 32: 1, + 52: 1, + 47: 0, + 46: 1, + 58: 0, + 40: 1}, + 42:{ + 50: 1, + 60: 1, + 61: 2, + 42: 1, + 53: 1, + 56: 2, + 54: 2, + 49: 1, + 51: 2, + 43: 2, + 44: 2, + 63: 1, + 34: 1, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 0, + 8: 0, + 20: 0, + 16: 0, + 3: 0, + 2: 0, + 24: 0, + 14: 0, + 22: 0, + 1: 0, + 25: 0, + 15: 0, + 4: 0, + 11: 0, + 6: 0, + 23: 0, + 12: 0, + 19: 0, + 13: 0, + 26: 0, + 18: 1, + 27: 0, + 21: 0, + 17: 0, + 7: 0, + 10: 0, + 5: 0, + 32: 1, + 52: 2, + 47: 0, + 46: 1, + 58: 0, + 40: 1}, + 53:{ + 50: 1, + 60: 2, + 61: 1, + 42: 1, + 53: 0, + 56: 1, + 54: 2, + 49: 2, + 51: 1, + 43: 2, + 44: 2, + 63: 1, + 34: 0, + 55: 1, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 0, + 8: 0, + 20: 0, + 16: 0, + 3: 0, + 2: 0, + 24: 0, + 14: 0, + 22: 0, + 1: 0, + 25: 0, + 15: 0, + 4: 0, + 11: 0, + 6: 0, + 23: 0, + 12: 0, + 19: 0, + 13: 0, + 26: 0, + 18: 0, + 27: 0, + 21: 0, + 17: 0, + 7: 0, + 10: 0, + 5: 0, + 32: 0, + 52: 1, + 47: 
0, + 46: 0, + 58: 0, + 40: 0}, + 56:{ + 50: 1, + 60: 1, + 61: 1, + 42: 2, + 53: 2, + 56: 2, + 54: 1, + 49: 1, + 51: 0, + 43: 1, + 44: 1, + 63: 1, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 0, + 8: 0, + 20: 0, + 16: 0, + 3: 0, + 2: 0, + 24: 0, + 14: 0, + 22: 0, + 1: 0, + 25: 0, + 15: 0, + 4: 0, + 11: 0, + 6: 0, + 23: 0, + 12: 0, + 19: 0, + 13: 0, + 26: 0, + 18: 0, + 27: 0, + 21: 0, + 17: 0, + 7: 0, + 10: 0, + 5: 0, + 32: 0, + 52: 1, + 47: 0, + 46: 1, + 58: 0, + 40: 1}, + 54:{ + 50: 1, + 60: 1, + 61: 1, + 42: 1, + 53: 1, + 56: 1, + 54: 1, + 49: 1, + 51: 0, + 43: 1, + 44: 2, + 63: 1, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 0, + 8: 0, + 20: 0, + 16: 0, + 3: 1, + 2: 0, + 24: 0, + 14: 0, + 22: 0, + 1: 0, + 25: 0, + 15: 0, + 4: 0, + 11: 0, + 6: 0, + 23: 0, + 12: 0, + 19: 0, + 13: 0, + 26: 0, + 18: 0, + 27: 0, + 21: 0, + 17: 0, + 7: 0, + 10: 0, + 5: 0, + 32: 0, + 52: 2, + 47: 0, + 46: 1, + 58: 0, + 40: 1}, + 49:{ + 50: 1, + 60: 1, + 61: 1, + 42: 1, + 53: 1, + 56: 1, + 54: 2, + 49: 1, + 51: 2, + 43: 1, + 44: 1, + 63: 1, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 0, + 8: 0, + 20: 0, + 16: 0, + 3: 0, + 2: 0, + 24: 0, + 14: 0, + 22: 0, + 1: 0, + 25: 0, + 15: 0, + 4: 0, + 11: 0, + 6: 0, + 23: 0, + 12: 0, + 19: 0, + 13: 0, + 26: 0, + 18: 0, + 27: 0, + 21: 0, + 17: 0, + 7: 0, + 10: 0, + 5: 0, + 32: 0, + 52: 1, + 47: 0, + 46: 1, + 58: 0, + 40: 1}, + 51:{ + 50: 2, + 60: 1, + 61: 1, + 42: 2, + 53: 1, + 56: 1, + 54: 1, + 49: 2, + 51: 1, + 43: 1, + 44: 1, + 63: 1, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 
45: 0, + 9: 0, + 8: 0, + 20: 0, + 16: 0, + 3: 0, + 2: 0, + 24: 0, + 14: 0, + 22: 0, + 1: 0, + 25: 0, + 15: 0, + 4: 0, + 11: 0, + 6: 0, + 23: 0, + 12: 0, + 19: 0, + 13: 0, + 26: 0, + 18: 0, + 27: 0, + 21: 0, + 17: 0, + 7: 0, + 10: 0, + 5: 0, + 32: 0, + 52: 2, + 47: 0, + 46: 1, + 58: 0, + 40: 1}, + 43:{ + 50: 1, + 60: 1, + 61: 0, + 42: 2, + 53: 1, + 56: 1, + 54: 1, + 49: 1, + 51: 1, + 43: 1, + 44: 2, + 63: 1, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 0, + 8: 0, + 20: 0, + 16: 0, + 3: 0, + 2: 0, + 24: 0, + 14: 0, + 22: 0, + 1: 0, + 25: 0, + 15: 0, + 4: 0, + 11: 0, + 6: 0, + 23: 0, + 12: 0, + 19: 0, + 13: 0, + 26: 0, + 18: 0, + 27: 0, + 21: 0, + 17: 0, + 7: 0, + 10: 0, + 5: 0, + 32: 0, + 52: 1, + 47: 0, + 46: 2, + 58: 0, + 40: 2}, + 44:{ + 50: 1, + 60: 1, + 61: 0, + 42: 2, + 53: 2, + 56: 1, + 54: 0, + 49: 1, + 51: 1, + 43: 1, + 44: 1, + 63: 1, + 34: 1, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 0, + 8: 0, + 20: 0, + 16: 0, + 3: 0, + 2: 0, + 24: 0, + 14: 0, + 22: 0, + 1: 0, + 25: 0, + 15: 0, + 4: 0, + 11: 0, + 6: 0, + 23: 0, + 12: 0, + 19: 0, + 13: 0, + 26: 0, + 18: 0, + 27: 0, + 21: 0, + 17: 0, + 7: 0, + 10: 0, + 5: 0, + 32: 0, + 52: 2, + 47: 0, + 46: 1, + 58: 0, + 40: 1}, + 63:{ + 50: 1, + 60: 1, + 61: 1, + 42: 1, + 53: 1, + 56: 1, + 54: 1, + 49: 0, + 51: 1, + 43: 2, + 44: 1, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 0, + 8: 0, + 20: 0, + 16: 0, + 3: 0, + 2: 0, + 24: 0, + 14: 0, + 22: 0, + 1: 0, + 25: 0, + 15: 0, + 4: 0, + 11: 0, + 6: 0, + 23: 0, + 12: 0, + 19: 0, + 13: 0, + 26: 0, + 18: 0, + 27: 0, + 21: 0, + 17: 0, + 7: 0, + 10: 0, + 5: 0, + 32: 0, + 52: 1, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 
34:{ + 50: 1, + 60: 0, + 61: 1, + 42: 0, + 53: 1, + 56: 0, + 54: 1, + 49: 1, + 51: 0, + 43: 1, + 44: 1, + 63: 0, + 34: 2, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 2, + 8: 1, + 20: 1, + 16: 1, + 3: 1, + 2: 1, + 24: 1, + 14: 1, + 22: 1, + 1: 2, + 25: 0, + 15: 1, + 4: 1, + 11: 0, + 6: 2, + 23: 0, + 12: 1, + 19: 1, + 13: 1, + 26: 0, + 18: 1, + 27: 0, + 21: 1, + 17: 1, + 7: 1, + 10: 1, + 5: 1, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 55:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 1, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 1, + 8: 0, + 20: 0, + 16: 0, + 3: 1, + 2: 1, + 24: 0, + 14: 0, + 22: 0, + 1: 2, + 25: 0, + 15: 0, + 4: 1, + 11: 0, + 6: 1, + 23: 1, + 12: 1, + 19: 1, + 13: 0, + 26: 0, + 18: 0, + 27: 0, + 21: 0, + 17: 0, + 7: 1, + 10: 1, + 5: 0, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 48:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 1, + 8: 0, + 20: 0, + 16: 0, + 3: 0, + 2: 1, + 24: 0, + 14: 0, + 22: 0, + 1: 0, + 25: 0, + 15: 1, + 4: 1, + 11: 0, + 6: 1, + 23: 0, + 12: 0, + 19: 0, + 13: 0, + 26: 0, + 18: 0, + 27: 0, + 21: 0, + 17: 0, + 7: 0, + 10: 0, + 5: 0, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 39:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 0, + 8: 0, + 20: 0, + 
16: 0, + 3: 0, + 2: 0, + 24: 0, + 14: 0, + 22: 0, + 1: 0, + 25: 0, + 15: 1, + 4: 1, + 11: 0, + 6: 0, + 23: 0, + 12: 0, + 19: 0, + 13: 0, + 26: 0, + 18: 0, + 27: 0, + 21: 1, + 17: 1, + 7: 0, + 10: 0, + 5: 0, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 57:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 0, + 8: 0, + 20: 0, + 16: 0, + 3: 0, + 2: 0, + 24: 0, + 14: 0, + 22: 0, + 1: 0, + 25: 0, + 15: 0, + 4: 0, + 11: 0, + 6: 0, + 23: 0, + 12: 0, + 19: 0, + 13: 0, + 26: 0, + 18: 0, + 27: 0, + 21: 0, + 17: 0, + 7: 0, + 10: 0, + 5: 0, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 30:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 1, + 31: 0, + 29: 0, + 35: 1, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 2, + 8: 2, + 20: 2, + 16: 2, + 3: 2, + 2: 2, + 24: 2, + 14: 2, + 22: 2, + 1: 2, + 25: 2, + 15: 2, + 4: 2, + 11: 1, + 6: 2, + 23: 0, + 12: 2, + 19: 2, + 13: 2, + 26: 0, + 18: 2, + 27: 0, + 21: 2, + 17: 2, + 7: 2, + 10: 2, + 5: 2, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 59:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 1, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 0, + 8: 1, + 20: 1, + 16: 0, + 3: 0, + 2: 0, + 24: 1, + 14: 0, + 22: 0, + 1: 1, + 25: 0, + 15: 1, + 4: 2, + 11: 0, + 6: 2, + 23: 0, + 12: 1, + 19: 0, + 13: 0, + 26: 0, + 18: 0, + 27: 0, + 21: 0, + 17: 0, + 7: 1, + 10: 1, + 5: 0, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 41:{ + 50: 0, + 60: 0, + 61: 0, + 42: 
0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 0, + 8: 2, + 20: 1, + 16: 2, + 3: 1, + 2: 1, + 24: 1, + 14: 1, + 22: 1, + 1: 1, + 25: 1, + 15: 1, + 4: 2, + 11: 0, + 6: 2, + 23: 0, + 12: 2, + 19: 1, + 13: 0, + 26: 0, + 18: 1, + 27: 0, + 21: 2, + 17: 1, + 7: 2, + 10: 2, + 5: 1, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 33:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 1, + 59: 0, + 41: 0, + 33: 1, + 37: 0, + 36: 1, + 31: 0, + 29: 1, + 35: 0, + 62: 0, + 28: 1, + 38: 0, + 45: 0, + 9: 1, + 8: 2, + 20: 2, + 16: 2, + 3: 1, + 2: 1, + 24: 2, + 14: 1, + 22: 1, + 1: 3, + 25: 1, + 15: 2, + 4: 2, + 11: 2, + 6: 2, + 23: 2, + 12: 2, + 19: 2, + 13: 1, + 26: 0, + 18: 2, + 27: 1, + 21: 2, + 17: 2, + 7: 2, + 10: 2, + 5: 2, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 37:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 1, + 31: 1, + 29: 1, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 2, + 8: 2, + 20: 1, + 16: 2, + 3: 2, + 2: 1, + 24: 1, + 14: 2, + 22: 1, + 1: 3, + 25: 2, + 15: 1, + 4: 2, + 11: 2, + 6: 1, + 23: 2, + 12: 2, + 19: 1, + 13: 2, + 26: 1, + 18: 1, + 27: 1, + 21: 1, + 17: 1, + 7: 2, + 10: 2, + 5: 2, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 36:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 1, + 31: 1, + 29: 1, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 2, + 8: 2, + 20: 1, + 16: 2, + 3: 2, + 2: 1, + 24: 1, + 14: 
2, + 22: 1, + 1: 2, + 25: 2, + 15: 1, + 4: 2, + 11: 2, + 6: 2, + 23: 2, + 12: 2, + 19: 2, + 13: 1, + 26: 1, + 18: 1, + 27: 2, + 21: 1, + 17: 1, + 7: 2, + 10: 2, + 5: 2, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 31:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 1, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 1, + 31: 0, + 29: 2, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 2, + 8: 2, + 20: 2, + 16: 2, + 3: 2, + 2: 1, + 24: 2, + 14: 2, + 22: 2, + 1: 3, + 25: 1, + 15: 2, + 4: 2, + 11: 2, + 6: 2, + 23: 2, + 12: 2, + 19: 2, + 13: 2, + 26: 2, + 18: 2, + 27: 1, + 21: 2, + 17: 2, + 7: 2, + 10: 2, + 5: 2, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 29:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 1, + 29: 2, + 35: 0, + 62: 0, + 28: 1, + 38: 0, + 45: 0, + 9: 2, + 8: 2, + 20: 2, + 16: 2, + 3: 3, + 2: 2, + 24: 2, + 14: 2, + 22: 1, + 1: 2, + 25: 2, + 15: 2, + 4: 2, + 11: 2, + 6: 2, + 23: 2, + 12: 2, + 19: 1, + 13: 2, + 26: 1, + 18: 2, + 27: 1, + 21: 2, + 17: 2, + 7: 2, + 10: 2, + 5: 2, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 35:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 1, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 2, + 8: 2, + 20: 1, + 16: 2, + 3: 2, + 2: 1, + 24: 1, + 14: 1, + 22: 1, + 1: 1, + 25: 1, + 15: 2, + 4: 2, + 11: 2, + 6: 2, + 23: 2, + 12: 2, + 19: 2, + 13: 2, + 26: 1, + 18: 2, + 27: 1, + 21: 2, + 17: 2, + 7: 2, + 10: 2, + 5: 2, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 62:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 
0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 0, + 8: 1, + 20: 1, + 16: 1, + 3: 1, + 2: 1, + 24: 1, + 14: 1, + 22: 0, + 1: 1, + 25: 0, + 15: 1, + 4: 2, + 11: 1, + 6: 1, + 23: 1, + 12: 1, + 19: 1, + 13: 1, + 26: 0, + 18: 1, + 27: 0, + 21: 1, + 17: 1, + 7: 1, + 10: 1, + 5: 1, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 28:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 3, + 59: 0, + 41: 1, + 33: 3, + 37: 2, + 36: 2, + 31: 3, + 29: 3, + 35: 2, + 62: 1, + 28: 0, + 38: 2, + 45: 1, + 9: 2, + 8: 2, + 20: 1, + 16: 2, + 3: 1, + 2: 2, + 24: 1, + 14: 1, + 22: 1, + 1: 2, + 25: 2, + 15: 2, + 4: 2, + 11: 1, + 6: 2, + 23: 1, + 12: 2, + 19: 1, + 13: 2, + 26: 1, + 18: 1, + 27: 1, + 21: 1, + 17: 1, + 7: 2, + 10: 2, + 5: 2, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 38:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 2, + 59: 0, + 41: 0, + 33: 2, + 37: 2, + 36: 2, + 31: 2, + 29: 2, + 35: 1, + 62: 1, + 28: 0, + 38: 0, + 45: 0, + 9: 0, + 8: 0, + 20: 0, + 16: 0, + 3: 0, + 2: 2, + 24: 0, + 14: 0, + 22: 0, + 1: 1, + 25: 0, + 15: 0, + 4: 0, + 11: 0, + 6: 0, + 23: 0, + 12: 0, + 19: 0, + 13: 1, + 26: 0, + 18: 0, + 27: 0, + 21: 0, + 17: 0, + 7: 0, + 10: 0, + 5: 0, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 45:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 2, + 59: 0, + 41: 0, + 33: 2, + 37: 1, + 36: 2, + 31: 1, + 29: 2, + 35: 1, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 1, + 8: 0, + 20: 1, + 16: 0, + 3: 1, + 2: 2, + 24: 0, + 14: 1, + 22: 0, + 1: 1, + 25: 0, + 15: 
0, + 4: 0, + 11: 1, + 6: 1, + 23: 0, + 12: 1, + 19: 0, + 13: 1, + 26: 0, + 18: 1, + 27: 0, + 21: 0, + 17: 0, + 7: 1, + 10: 0, + 5: 1, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 9:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 1, + 55: 1, + 48: 1, + 39: 0, + 57: 0, + 30: 0, + 59: 2, + 41: 2, + 33: 2, + 37: 2, + 36: 2, + 31: 2, + 29: 2, + 35: 2, + 62: 1, + 28: 0, + 38: 0, + 45: 0, + 9: 2, + 8: 3, + 20: 3, + 16: 3, + 3: 3, + 2: 3, + 24: 3, + 14: 3, + 22: 3, + 1: 3, + 25: 3, + 15: 3, + 4: 3, + 11: 3, + 6: 3, + 23: 3, + 12: 3, + 19: 3, + 13: 2, + 26: 3, + 18: 3, + 27: 1, + 21: 3, + 17: 3, + 7: 3, + 10: 3, + 5: 3, + 32: 0, + 52: 0, + 47: 0, + 46: 1, + 58: 0, + 40: 1}, + 8:{ + 50: 0, + 60: 0, + 61: 1, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 1, + 55: 1, + 48: 0, + 39: 0, + 57: 0, + 30: 2, + 59: 0, + 41: 0, + 33: 2, + 37: 2, + 36: 2, + 31: 2, + 29: 2, + 35: 2, + 62: 1, + 28: 3, + 38: 0, + 45: 0, + 9: 3, + 8: 3, + 20: 3, + 16: 3, + 3: 3, + 2: 3, + 24: 3, + 14: 3, + 22: 3, + 1: 3, + 25: 2, + 15: 3, + 4: 3, + 11: 2, + 6: 3, + 23: 3, + 12: 3, + 19: 3, + 13: 3, + 26: 1, + 18: 3, + 27: 2, + 21: 3, + 17: 3, + 7: 3, + 10: 3, + 5: 3, + 32: 1, + 52: 0, + 47: 0, + 46: 1, + 58: 0, + 40: 1}, + 20:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 1, + 55: 2, + 48: 0, + 39: 0, + 57: 0, + 30: 2, + 59: 0, + 41: 0, + 33: 1, + 37: 1, + 36: 1, + 31: 2, + 29: 2, + 35: 1, + 62: 0, + 28: 2, + 38: 0, + 45: 0, + 9: 2, + 8: 3, + 20: 2, + 16: 3, + 3: 3, + 2: 3, + 24: 3, + 14: 2, + 22: 2, + 1: 3, + 25: 1, + 15: 1, + 4: 3, + 11: 3, + 6: 3, + 23: 3, + 12: 3, + 19: 2, + 13: 3, + 26: 2, + 18: 2, + 27: 1, + 21: 1, + 17: 1, + 7: 3, + 10: 3, + 5: 3, + 32: 0, + 52: 1, + 47: 0, + 46: 1, + 58: 0, + 40: 0}, + 16:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, 
+ 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 2, + 59: 0, + 41: 0, + 33: 2, + 37: 2, + 36: 2, + 31: 2, + 29: 2, + 35: 2, + 62: 1, + 28: 2, + 38: 0, + 45: 0, + 9: 3, + 8: 3, + 20: 3, + 16: 3, + 3: 3, + 2: 3, + 24: 1, + 14: 2, + 22: 2, + 1: 3, + 25: 2, + 15: 2, + 4: 3, + 11: 3, + 6: 3, + 23: 2, + 12: 3, + 19: 2, + 13: 3, + 26: 2, + 18: 3, + 27: 0, + 21: 2, + 17: 3, + 7: 3, + 10: 3, + 5: 3, + 32: 0, + 52: 0, + 47: 0, + 46: 1, + 58: 0, + 40: 1}, + 3:{ + 50: 0, + 60: 0, + 61: 1, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 1, + 55: 0, + 48: 1, + 39: 0, + 57: 0, + 30: 1, + 59: 1, + 41: 2, + 33: 2, + 37: 2, + 36: 2, + 31: 3, + 29: 2, + 35: 1, + 62: 1, + 28: 2, + 38: 0, + 45: 0, + 9: 3, + 8: 3, + 20: 3, + 16: 3, + 3: 3, + 2: 3, + 24: 3, + 14: 3, + 22: 3, + 1: 3, + 25: 1, + 15: 3, + 4: 3, + 11: 3, + 6: 3, + 23: 3, + 12: 3, + 19: 3, + 13: 3, + 26: 0, + 18: 3, + 27: 1, + 21: 3, + 17: 3, + 7: 3, + 10: 3, + 5: 3, + 32: 1, + 52: 1, + 47: 0, + 46: 1, + 58: 0, + 40: 2}, + 2:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 1, + 63: 0, + 34: 1, + 55: 1, + 48: 1, + 39: 0, + 57: 0, + 30: 2, + 59: 0, + 41: 0, + 33: 2, + 37: 1, + 36: 1, + 31: 2, + 29: 2, + 35: 3, + 62: 0, + 28: 3, + 38: 0, + 45: 0, + 9: 3, + 8: 3, + 20: 3, + 16: 3, + 3: 3, + 2: 3, + 24: 3, + 14: 3, + 22: 3, + 1: 3, + 25: 3, + 15: 3, + 4: 3, + 11: 3, + 6: 3, + 23: 3, + 12: 3, + 19: 3, + 13: 3, + 26: 3, + 18: 3, + 27: 3, + 21: 3, + 17: 3, + 7: 3, + 10: 3, + 5: 3, + 32: 1, + 52: 0, + 47: 0, + 46: 1, + 58: 0, + 40: 2}, + 24:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 1, + 48: 0, + 39: 0, + 57: 0, + 30: 2, + 59: 0, + 41: 1, + 33: 1, + 37: 2, + 36: 2, + 31: 2, + 29: 2, + 35: 1, + 62: 1, + 28: 2, + 38: 0, + 45: 0, + 9: 3, + 8: 2, + 20: 2, + 16: 2, + 3: 3, + 2: 3, + 24: 2, + 14: 2, + 22: 1, + 1: 3, + 25: 1, + 15: 3, + 4: 3, + 11: 2, + 6: 3, + 23: 2, + 
12: 2, + 19: 1, + 13: 2, + 26: 1, + 18: 1, + 27: 0, + 21: 2, + 17: 3, + 7: 3, + 10: 1, + 5: 2, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 1}, + 14:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 1, + 55: 1, + 48: 0, + 39: 0, + 57: 0, + 30: 2, + 59: 1, + 41: 2, + 33: 2, + 37: 2, + 36: 2, + 31: 2, + 29: 2, + 35: 2, + 62: 1, + 28: 0, + 38: 0, + 45: 0, + 9: 2, + 8: 3, + 20: 2, + 16: 3, + 3: 3, + 2: 3, + 24: 3, + 14: 2, + 22: 2, + 1: 3, + 25: 1, + 15: 2, + 4: 3, + 11: 3, + 6: 3, + 23: 2, + 12: 3, + 19: 3, + 13: 1, + 26: 2, + 18: 2, + 27: 2, + 21: 3, + 17: 3, + 7: 3, + 10: 3, + 5: 3, + 32: 0, + 52: 1, + 47: 0, + 46: 1, + 58: 0, + 40: 1}, + 22:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 1, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 2, + 59: 0, + 41: 0, + 33: 2, + 37: 1, + 36: 1, + 31: 2, + 29: 1, + 35: 1, + 62: 1, + 28: 1, + 38: 0, + 45: 0, + 9: 3, + 8: 3, + 20: 3, + 16: 1, + 3: 3, + 2: 3, + 24: 2, + 14: 3, + 22: 2, + 1: 3, + 25: 1, + 15: 2, + 4: 3, + 11: 2, + 6: 2, + 23: 2, + 12: 3, + 19: 2, + 13: 3, + 26: 2, + 18: 3, + 27: 1, + 21: 2, + 17: 2, + 7: 3, + 10: 2, + 5: 3, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 1}, + 1:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 1, + 55: 1, + 48: 1, + 39: 0, + 57: 0, + 30: 2, + 59: 0, + 41: 0, + 33: 2, + 37: 2, + 36: 1, + 31: 2, + 29: 2, + 35: 2, + 62: 1, + 28: 2, + 38: 0, + 45: 0, + 9: 3, + 8: 3, + 20: 3, + 16: 3, + 3: 3, + 2: 3, + 24: 3, + 14: 3, + 22: 3, + 1: 3, + 25: 3, + 15: 3, + 4: 3, + 11: 3, + 6: 3, + 23: 3, + 12: 3, + 19: 3, + 13: 3, + 26: 3, + 18: 3, + 27: 3, + 21: 3, + 17: 3, + 7: 3, + 10: 3, + 5: 3, + 32: 1, + 52: 0, + 47: 0, + 46: 1, + 58: 0, + 40: 2}, + 25:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 
57: 0, + 30: 2, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 2, + 35: 0, + 62: 0, + 28: 1, + 38: 0, + 45: 0, + 9: 1, + 8: 0, + 20: 0, + 16: 0, + 3: 1, + 2: 0, + 24: 0, + 14: 1, + 22: 0, + 1: 0, + 25: 0, + 15: 0, + 4: 1, + 11: 0, + 6: 1, + 23: 0, + 12: 0, + 19: 0, + 13: 0, + 26: 0, + 18: 0, + 27: 0, + 21: 0, + 17: 0, + 7: 0, + 10: 1, + 5: 0, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 1}, + 15:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 2, + 59: 0, + 41: 0, + 33: 2, + 37: 2, + 36: 2, + 31: 2, + 29: 2, + 35: 1, + 62: 1, + 28: 3, + 38: 0, + 45: 0, + 9: 3, + 8: 3, + 20: 2, + 16: 3, + 3: 3, + 2: 3, + 24: 3, + 14: 3, + 22: 2, + 1: 3, + 25: 3, + 15: 3, + 4: 3, + 11: 3, + 6: 3, + 23: 3, + 12: 3, + 19: 3, + 13: 2, + 26: 3, + 18: 3, + 27: 1, + 21: 2, + 17: 2, + 7: 3, + 10: 3, + 5: 3, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 4:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 1, + 55: 1, + 48: 0, + 39: 0, + 57: 0, + 30: 3, + 59: 0, + 41: 0, + 33: 2, + 37: 2, + 36: 2, + 31: 2, + 29: 2, + 35: 2, + 62: 1, + 28: 2, + 38: 0, + 45: 0, + 9: 3, + 8: 3, + 20: 3, + 16: 3, + 3: 3, + 2: 3, + 24: 3, + 14: 3, + 22: 3, + 1: 3, + 25: 3, + 15: 3, + 4: 3, + 11: 3, + 6: 3, + 23: 2, + 12: 3, + 19: 3, + 13: 3, + 26: 2, + 18: 3, + 27: 2, + 21: 3, + 17: 3, + 7: 3, + 10: 3, + 5: 3, + 32: 1, + 52: 0, + 47: 0, + 46: 1, + 58: 0, + 40: 1}, + 11:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 1, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 1, + 8: 1, + 20: 1, + 16: 0, + 3: 1, + 2: 1, + 24: 1, + 14: 1, + 22: 0, + 1: 1, + 25: 0, + 15: 1, + 4: 1, + 11: 1, + 6: 1, + 23: 0, + 12: 1, + 19: 0, + 13: 1, + 26: 0, + 18: 
1, + 27: 1, + 21: 1, + 17: 1, + 7: 1, + 10: 1, + 5: 1, + 32: 0, + 52: 0, + 47: 0, + 46: 1, + 58: 0, + 40: 2}, + 6:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 1, + 48: 0, + 39: 0, + 57: 0, + 30: 2, + 59: 0, + 41: 0, + 33: 2, + 37: 2, + 36: 2, + 31: 2, + 29: 2, + 35: 2, + 62: 1, + 28: 2, + 38: 0, + 45: 0, + 9: 3, + 8: 3, + 20: 3, + 16: 3, + 3: 3, + 2: 3, + 24: 3, + 14: 3, + 22: 3, + 1: 3, + 25: 2, + 15: 3, + 4: 3, + 11: 3, + 6: 3, + 23: 3, + 12: 3, + 19: 3, + 13: 3, + 26: 0, + 18: 3, + 27: 2, + 21: 3, + 17: 3, + 7: 3, + 10: 3, + 5: 3, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 1}, + 23:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 1, + 55: 0, + 48: 1, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 1, + 8: 1, + 20: 1, + 16: 1, + 3: 1, + 2: 1, + 24: 0, + 14: 1, + 22: 1, + 1: 1, + 25: 0, + 15: 1, + 4: 1, + 11: 1, + 6: 1, + 23: 0, + 12: 1, + 19: 1, + 13: 1, + 26: 1, + 18: 1, + 27: 0, + 21: 0, + 17: 1, + 7: 1, + 10: 1, + 5: 1, + 32: 1, + 52: 0, + 47: 0, + 46: 1, + 58: 0, + 40: 2}, + 12:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 2, + 59: 0, + 41: 0, + 33: 2, + 37: 2, + 36: 2, + 31: 2, + 29: 2, + 35: 1, + 62: 1, + 28: 2, + 38: 0, + 45: 0, + 9: 3, + 8: 3, + 20: 3, + 16: 3, + 3: 3, + 2: 3, + 24: 3, + 14: 3, + 22: 3, + 1: 3, + 25: 2, + 15: 3, + 4: 3, + 11: 3, + 6: 3, + 23: 3, + 12: 3, + 19: 3, + 13: 3, + 26: 2, + 18: 3, + 27: 2, + 21: 3, + 17: 3, + 7: 3, + 10: 3, + 5: 3, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 19:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 1, + 55: 1, + 48: 0, + 39: 0, + 57: 0, + 30: 2, + 59: 0, + 41: 0, + 33: 
2, + 37: 1, + 36: 2, + 31: 2, + 29: 1, + 35: 1, + 62: 2, + 28: 2, + 38: 0, + 45: 0, + 9: 2, + 8: 3, + 20: 3, + 16: 3, + 3: 3, + 2: 3, + 24: 1, + 14: 3, + 22: 3, + 1: 3, + 25: 2, + 15: 3, + 4: 3, + 11: 2, + 6: 3, + 23: 2, + 12: 3, + 19: 2, + 13: 3, + 26: 3, + 18: 3, + 27: 0, + 21: 2, + 17: 3, + 7: 3, + 10: 1, + 5: 3, + 32: 0, + 52: 0, + 47: 0, + 46: 1, + 58: 0, + 40: 1}, + 13:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 1, + 39: 0, + 57: 0, + 30: 1, + 59: 1, + 41: 2, + 33: 2, + 37: 2, + 36: 2, + 31: 2, + 29: 2, + 35: 2, + 62: 1, + 28: 0, + 38: 0, + 45: 0, + 9: 2, + 8: 3, + 20: 3, + 16: 3, + 3: 3, + 2: 3, + 24: 3, + 14: 1, + 22: 3, + 1: 3, + 25: 2, + 15: 2, + 4: 3, + 11: 3, + 6: 3, + 23: 2, + 12: 3, + 19: 3, + 13: 2, + 26: 1, + 18: 2, + 27: 2, + 21: 3, + 17: 3, + 7: 3, + 10: 3, + 5: 3, + 32: 0, + 52: 0, + 47: 0, + 46: 1, + 58: 0, + 40: 1}, + 26:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 1, + 8: 0, + 20: 0, + 16: 0, + 3: 0, + 2: 1, + 24: 0, + 14: 1, + 22: 0, + 1: 0, + 25: 0, + 15: 1, + 4: 1, + 11: 0, + 6: 1, + 23: 0, + 12: 0, + 19: 1, + 13: 0, + 26: 1, + 18: 1, + 27: 0, + 21: 0, + 17: 1, + 7: 1, + 10: 1, + 5: 0, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 1}, + 18:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 1, + 48: 0, + 39: 0, + 57: 0, + 30: 2, + 59: 0, + 41: 0, + 33: 2, + 37: 1, + 36: 2, + 31: 1, + 29: 2, + 35: 1, + 62: 1, + 28: 2, + 38: 0, + 45: 0, + 9: 3, + 8: 2, + 20: 3, + 16: 2, + 3: 3, + 2: 3, + 24: 2, + 14: 3, + 22: 3, + 1: 3, + 25: 2, + 15: 3, + 4: 3, + 11: 2, + 6: 2, + 23: 3, + 12: 3, + 19: 3, + 13: 3, + 26: 2, + 18: 2, + 27: 2, + 21: 3, + 17: 3, + 7: 3, 
+ 10: 3, + 5: 3, + 32: 0, + 52: 0, + 47: 0, + 46: 1, + 58: 0, + 40: 0}, + 27:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 1, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 1, + 8: 0, + 20: 0, + 16: 0, + 3: 0, + 2: 0, + 24: 0, + 14: 0, + 22: 0, + 1: 0, + 25: 0, + 15: 0, + 4: 1, + 11: 0, + 6: 0, + 23: 0, + 12: 0, + 19: 1, + 13: 0, + 26: 0, + 18: 0, + 27: 0, + 21: 0, + 17: 0, + 7: 1, + 10: 0, + 5: 1, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 1}, + 21:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 1, + 48: 0, + 39: 0, + 57: 0, + 30: 2, + 59: 0, + 41: 0, + 33: 2, + 37: 2, + 36: 1, + 31: 2, + 29: 2, + 35: 1, + 62: 1, + 28: 2, + 38: 0, + 45: 0, + 9: 3, + 8: 3, + 20: 2, + 16: 3, + 3: 3, + 2: 3, + 24: 1, + 14: 3, + 22: 2, + 1: 3, + 25: 1, + 15: 1, + 4: 3, + 11: 2, + 6: 3, + 23: 2, + 12: 3, + 19: 1, + 13: 3, + 26: 2, + 18: 3, + 27: 2, + 21: 2, + 17: 3, + 7: 3, + 10: 0, + 5: 3, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 17:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 1, + 55: 1, + 48: 0, + 39: 0, + 57: 0, + 30: 2, + 59: 0, + 41: 0, + 33: 2, + 37: 2, + 36: 1, + 31: 2, + 29: 2, + 35: 2, + 62: 1, + 28: 2, + 38: 0, + 45: 0, + 9: 3, + 8: 3, + 20: 2, + 16: 3, + 3: 3, + 2: 3, + 24: 2, + 14: 3, + 22: 3, + 1: 3, + 25: 1, + 15: 1, + 4: 3, + 11: 2, + 6: 3, + 23: 2, + 12: 3, + 19: 3, + 13: 3, + 26: 2, + 18: 3, + 27: 2, + 21: 3, + 17: 2, + 7: 3, + 10: 3, + 5: 3, + 32: 0, + 52: 1, + 47: 0, + 46: 1, + 58: 0, + 40: 1}, + 7:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 1, + 55: 2, + 48: 1, + 39: 0, + 57: 0, + 30: 2, + 59: 0, + 41: 1, + 33: 2, + 37: 2, + 36: 2, + 31: 2, + 29: 
2, + 35: 2, + 62: 1, + 28: 0, + 38: 0, + 45: 0, + 9: 3, + 8: 3, + 20: 3, + 16: 3, + 3: 3, + 2: 3, + 24: 3, + 14: 3, + 22: 3, + 1: 3, + 25: 3, + 15: 3, + 4: 3, + 11: 3, + 6: 3, + 23: 3, + 12: 3, + 19: 3, + 13: 3, + 26: 2, + 18: 3, + 27: 3, + 21: 3, + 17: 3, + 7: 3, + 10: 3, + 5: 3, + 32: 0, + 52: 0, + 47: 0, + 46: 1, + 58: 0, + 40: 2}, + 10:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 1, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 1, + 59: 0, + 41: 0, + 33: 1, + 37: 1, + 36: 1, + 31: 1, + 29: 1, + 35: 1, + 62: 1, + 28: 2, + 38: 3, + 45: 2, + 9: 3, + 8: 3, + 20: 3, + 16: 3, + 3: 3, + 2: 3, + 24: 2, + 14: 3, + 22: 3, + 1: 3, + 25: 3, + 15: 3, + 4: 3, + 11: 3, + 6: 3, + 23: 2, + 12: 3, + 19: 2, + 13: 3, + 26: 2, + 18: 3, + 27: 1, + 21: 2, + 17: 3, + 7: 3, + 10: 3, + 5: 3, + 32: 0, + 52: 0, + 47: 0, + 46: 1, + 58: 0, + 40: 1}, + 5:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 1, + 55: 0, + 48: 1, + 39: 1, + 57: 0, + 30: 2, + 59: 0, + 41: 0, + 33: 2, + 37: 2, + 36: 2, + 31: 2, + 29: 2, + 35: 1, + 62: 1, + 28: 2, + 38: 0, + 45: 0, + 9: 3, + 8: 3, + 20: 3, + 16: 2, + 3: 3, + 2: 3, + 24: 2, + 14: 3, + 22: 2, + 1: 3, + 25: 2, + 15: 3, + 4: 3, + 11: 3, + 6: 3, + 23: 3, + 12: 3, + 19: 2, + 13: 3, + 26: 2, + 18: 3, + 27: 1, + 21: 2, + 17: 3, + 7: 3, + 10: 3, + 5: 3, + 32: 1, + 52: 1, + 47: 0, + 46: 0, + 58: 0, + 40: 2}, + 32:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 1, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 1, + 8: 1, + 20: 1, + 16: 1, + 3: 1, + 2: 1, + 24: 0, + 14: 1, + 22: 0, + 1: 1, + 25: 0, + 15: 1, + 4: 1, + 11: 0, + 6: 1, + 23: 0, + 12: 0, + 19: 1, + 13: 1, + 26: 0, + 18: 1, + 27: 0, + 21: 1, + 17: 0, + 7: 1, + 10: 1, + 5: 1, + 32: 0, + 52: 0, + 
47: 0, + 46: 0, + 58: 0, + 40: 0}, + 52:{ + 50: 1, + 60: 0, + 61: 1, + 42: 1, + 53: 1, + 56: 1, + 54: 0, + 49: 0, + 51: 1, + 43: 2, + 44: 2, + 63: 1, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 0, + 8: 0, + 20: 0, + 16: 0, + 3: 0, + 2: 1, + 24: 0, + 14: 0, + 22: 0, + 1: 0, + 25: 0, + 15: 0, + 4: 0, + 11: 0, + 6: 1, + 23: 0, + 12: 0, + 19: 0, + 13: 0, + 26: 0, + 18: 0, + 27: 0, + 21: 0, + 17: 0, + 7: 0, + 10: 0, + 5: 1, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 47:{ + 50: 1, + 60: 1, + 61: 1, + 42: 1, + 53: 1, + 56: 1, + 54: 1, + 49: 1, + 51: 1, + 43: 1, + 44: 1, + 63: 1, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 2, + 8: 1, + 20: 1, + 16: 1, + 3: 1, + 2: 1, + 24: 1, + 14: 1, + 22: 1, + 1: 1, + 25: 0, + 15: 1, + 4: 1, + 11: 0, + 6: 1, + 23: 0, + 12: 1, + 19: 1, + 13: 1, + 26: 0, + 18: 1, + 27: 0, + 21: 1, + 17: 1, + 7: 1, + 10: 1, + 5: 1, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 46:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 1, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 1, + 8: 1, + 20: 1, + 16: 0, + 3: 0, + 2: 0, + 24: 0, + 14: 0, + 22: 0, + 1: 1, + 25: 0, + 15: 1, + 4: 1, + 11: 0, + 6: 1, + 23: 0, + 12: 0, + 19: 0, + 13: 0, + 26: 0, + 18: 0, + 27: 0, + 21: 1, + 17: 0, + 7: 1, + 10: 0, + 5: 0, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 0, + 40: 0}, + 58:{ + 50: 0, + 60: 0, + 61: 0, + 42: 0, + 53: 0, + 56: 0, + 54: 0, + 49: 0, + 51: 0, + 43: 0, + 44: 0, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 
0, + 45: 0, + 9: 0, + 8: 0, + 20: 0, + 16: 0, + 3: 0, + 2: 0, + 24: 0, + 14: 0, + 22: 0, + 1: 0, + 25: 0, + 15: 0, + 4: 0, + 11: 0, + 6: 0, + 23: 0, + 12: 0, + 19: 0, + 13: 0, + 26: 0, + 18: 0, + 27: 0, + 21: 0, + 17: 0, + 7: 0, + 10: 0, + 5: 0, + 32: 0, + 52: 0, + 47: 0, + 46: 0, + 58: 2, + 40: 0}, + 40:{ + 50: 1, + 60: 1, + 61: 1, + 42: 1, + 53: 1, + 56: 0, + 54: 1, + 49: 0, + 51: 1, + 43: 1, + 44: 1, + 63: 0, + 34: 0, + 55: 0, + 48: 0, + 39: 0, + 57: 0, + 30: 0, + 59: 0, + 41: 0, + 33: 0, + 37: 0, + 36: 0, + 31: 0, + 29: 0, + 35: 0, + 62: 0, + 28: 0, + 38: 0, + 45: 0, + 9: 1, + 8: 0, + 20: 0, + 16: 0, + 3: 1, + 2: 1, + 24: 1, + 14: 0, + 22: 0, + 1: 1, + 25: 0, + 15: 1, + 4: 1, + 11: 0, + 6: 1, + 23: 0, + 12: 1, + 19: 0, + 13: 0, + 26: 0, + 18: 1, + 27: 0, + 21: 0, + 17: 0, + 7: 1, + 10: 1, + 5: 1, + 32: 0, + 52: 0, + 47: 0, + 46: 1, + 58: 0, + 40: 2}} +WINDOWS_1255_HEBREW_CHAR_TO_ORDER = { + 0: 255, + 1: 255, + 2: 255, + 3: 255, + 4: 255, + 5: 255, + 6: 255, + 7: 255, + 8: 255, + 9: 255, + 10: 254, + 11: 255, + 12: 255, + 13: 254, + 14: 255, + 15: 255, + 16: 255, + 17: 255, + 18: 255, + 19: 255, + 20: 255, + 21: 255, + 22: 255, + 23: 255, + 24: 255, + 25: 255, + 26: 255, + 27: 255, + 28: 255, + 29: 255, + 30: 255, + 31: 255, + 32: 253, + 33: 253, + 34: 253, + 35: 253, + 36: 253, + 37: 253, + 38: 253, + 39: 253, + 40: 253, + 41: 253, + 42: 253, + 43: 253, + 44: 253, + 45: 253, + 46: 253, + 47: 253, + 48: 252, + 49: 252, + 50: 252, + 51: 252, + 52: 252, + 53: 252, + 54: 252, + 55: 252, + 56: 252, + 57: 252, + 58: 253, + 59: 253, + 60: 253, + 61: 253, + 62: 253, + 63: 253, + 64: 253, + 65: 69, + 66: 91, + 67: 79, + 68: 80, + 69: 92, + 70: 89, + 71: 97, + 72: 90, + 73: 68, + 74: 111, + 75: 112, + 76: 82, + 77: 73, + 78: 95, + 79: 85, + 80: 78, + 81: 121, + 82: 86, + 83: 71, + 84: 67, + 85: 102, + 86: 107, + 87: 84, + 88: 114, + 89: 103, + 90: 115, + 91: 253, + 92: 253, + 93: 253, + 94: 253, + 95: 253, + 96: 253, + 97: 50, + 98: 74, + 99: 60, + 100: 61, + 101: 42, + 
102: 76, + 103: 70, + 104: 64, + 105: 53, + 106: 105, + 107: 93, + 108: 56, + 109: 65, + 110: 54, + 111: 49, + 112: 66, + 113: 110, + 114: 51, + 115: 43, + 116: 44, + 117: 63, + 118: 81, + 119: 77, + 120: 98, + 121: 75, + 122: 108, + 123: 253, + 124: 253, + 125: 253, + 126: 253, + 127: 253, + 128: 124, + 129: 202, + 130: 203, + 131: 204, + 132: 205, + 133: 40, + 134: 58, + 135: 206, + 136: 207, + 137: 208, + 138: 209, + 139: 210, + 140: 211, + 141: 212, + 142: 213, + 143: 214, + 144: 215, + 145: 83, + 146: 52, + 147: 47, + 148: 46, + 149: 72, + 150: 32, + 151: 94, + 152: 216, + 153: 113, + 154: 217, + 155: 109, + 156: 218, + 157: 219, + 158: 220, + 159: 221, + 160: 34, + 161: 116, + 162: 222, + 163: 118, + 164: 100, + 165: 223, + 166: 224, + 167: 117, + 168: 119, + 169: 104, + 170: 125, + 171: 225, + 172: 226, + 173: 87, + 174: 99, + 175: 227, + 176: 106, + 177: 122, + 178: 123, + 179: 228, + 180: 55, + 181: 229, + 182: 230, + 183: 101, + 184: 231, + 185: 232, + 186: 120, + 187: 233, + 188: 48, + 189: 39, + 190: 57, + 191: 234, + 192: 30, + 193: 59, + 194: 41, + 195: 88, + 196: 33, + 197: 37, + 198: 36, + 199: 31, + 200: 29, + 201: 35, + 202: 235, + 203: 62, + 204: 28, + 205: 236, + 206: 126, + 207: 237, + 208: 238, + 209: 38, + 210: 45, + 211: 239, + 212: 240, + 213: 241, + 214: 242, + 215: 243, + 216: 127, + 217: 244, + 218: 245, + 219: 246, + 220: 247, + 221: 248, + 222: 249, + 223: 250, + 224: 9, + 225: 8, + 226: 20, + 227: 16, + 228: 3, + 229: 2, + 230: 24, + 231: 14, + 232: 22, + 233: 1, + 234: 25, + 235: 15, + 236: 4, + 237: 11, + 238: 6, + 239: 23, + 240: 12, + 241: 19, + 242: 13, + 243: 26, + 244: 18, + 245: 27, + 246: 21, + 247: 17, + 248: 7, + 249: 10, + 250: 5, + 251: 251, + 252: 252, + 253: 128, + 254: 96, + 255: 253} +WINDOWS_1255_HEBREW_MODEL = SingleByteCharSetModel(charset_name="windows-1255", language="Hebrew", + char_to_order_map=WINDOWS_1255_HEBREW_CHAR_TO_ORDER, + language_model=HEBREW_LANG_MODEL, + typical_positive_ratio=0.984004, + 
keep_ascii_letters=False, + alphabet="אבגדהוזחטיךכלםמןנסעףפץצקרשתװױײ") diff --git a/APPS_UNCOMPILED/lib/chardet/langhungarianmodel.py b/APPS_UNCOMPILED/lib/chardet/langhungarianmodel.py new file mode 100644 index 0000000..ddc413f --- /dev/null +++ b/APPS_UNCOMPILED/lib/chardet/langhungarianmodel.py @@ -0,0 +1,4566 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/langhungarianmodel.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 102486 bytes +from chardet.sbcharsetprober import SingleByteCharSetModel +HUNGARIAN_LANG_MODEL = {28:{ + 28: 0, + 40: 1, + 54: 1, + 45: 2, + 32: 1, + 50: 1, + 49: 2, + 38: 1, + 39: 2, + 53: 1, + 36: 2, + 41: 2, + 34: 1, + 35: 2, + 47: 1, + 46: 2, + 43: 2, + 33: 2, + 37: 2, + 57: 1, + 48: 1, + 55: 1, + 52: 2, + 2: 0, + 18: 1, + 26: 1, + 17: 2, + 1: 1, + 27: 1, + 12: 1, + 20: 1, + 9: 1, + 22: 1, + 7: 2, + 6: 2, + 13: 2, + 4: 2, + 8: 0, + 23: 2, + 10: 2, + 5: 1, + 3: 1, + 21: 1, + 19: 1, + 62: 1, + 16: 0, + 11: 3, + 51: 1, + 44: 0, + 61: 1, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 0, + 15: 0, + 30: 0, + 25: 0, + 24: 0, + 31: 0, + 29: 0, + 42: 0, + 56: 0}, + 40:{ + 28: 2, + 40: 1, + 54: 1, + 45: 1, + 32: 2, + 50: 0, + 49: 0, + 38: 0, + 39: 1, + 53: 1, + 36: 1, + 41: 1, + 34: 0, + 35: 1, + 47: 2, + 46: 0, + 43: 1, + 33: 1, + 37: 1, + 57: 1, + 48: 1, + 55: 0, + 52: 0, + 2: 2, + 18: 0, + 26: 0, + 17: 0, + 1: 3, + 27: 0, + 12: 0, + 20: 0, + 9: 2, + 22: 1, + 7: 0, + 6: 1, + 13: 0, + 4: 0, + 8: 2, + 23: 1, + 10: 2, + 5: 0, + 3: 0, + 21: 3, + 19: 0, + 62: 0, + 16: 1, + 11: 0, + 51: 1, + 44: 1, + 61: 1, + 58: 1, + 59: 1, + 60: 1, + 63: 1, + 14: 2, + 15: 2, + 30: 1, + 25: 1, + 24: 1, + 31: 1, + 29: 1, + 42: 1, + 56: 1}, + 54:{ + 28: 1, + 40: 1, + 54: 1, + 45: 1, + 32: 1, + 50: 0, + 49: 0, + 38: 1, + 39: 2, + 53: 1, + 36: 1, + 41: 1, + 
34: 1, + 35: 0, + 47: 1, + 46: 1, + 43: 1, + 33: 2, + 37: 1, + 57: 1, + 48: 0, + 55: 1, + 52: 1, + 2: 2, + 18: 0, + 26: 0, + 17: 0, + 1: 1, + 27: 0, + 12: 0, + 20: 1, + 9: 1, + 22: 0, + 7: 0, + 6: 1, + 13: 0, + 4: 0, + 8: 2, + 23: 0, + 10: 1, + 5: 3, + 3: 0, + 21: 1, + 19: 0, + 62: 0, + 16: 1, + 11: 1, + 51: 1, + 44: 1, + 61: 1, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 1, + 15: 1, + 30: 1, + 25: 1, + 24: 0, + 31: 0, + 29: 0, + 42: 0, + 56: 0}, + 45:{ + 28: 2, + 40: 1, + 54: 0, + 45: 1, + 32: 2, + 50: 1, + 49: 1, + 38: 1, + 39: 2, + 53: 1, + 36: 1, + 41: 0, + 34: 1, + 35: 1, + 47: 2, + 46: 0, + 43: 1, + 33: 1, + 37: 1, + 57: 1, + 48: 1, + 55: 1, + 52: 1, + 2: 2, + 18: 0, + 26: 0, + 17: 0, + 1: 3, + 27: 0, + 12: 0, + 20: 0, + 9: 1, + 22: 0, + 7: 0, + 6: 0, + 13: 0, + 4: 0, + 8: 1, + 23: 0, + 10: 2, + 5: 0, + 3: 0, + 21: 2, + 19: 0, + 62: 0, + 16: 1, + 11: 1, + 51: 1, + 44: 1, + 61: 1, + 58: 1, + 59: 1, + 60: 1, + 63: 1, + 14: 1, + 15: 1, + 30: 1, + 25: 1, + 24: 1, + 31: 1, + 29: 1, + 42: 1, + 56: 0}, + 32:{ + 28: 1, + 40: 1, + 54: 1, + 45: 1, + 32: 1, + 50: 1, + 49: 2, + 38: 1, + 39: 1, + 53: 1, + 36: 2, + 41: 2, + 34: 2, + 35: 2, + 47: 1, + 46: 1, + 43: 2, + 33: 2, + 37: 2, + 57: 1, + 48: 1, + 55: 1, + 52: 1, + 2: 1, + 18: 1, + 26: 1, + 17: 2, + 1: 1, + 27: 1, + 12: 3, + 20: 1, + 9: 1, + 22: 1, + 7: 1, + 6: 2, + 13: 2, + 4: 2, + 8: 0, + 23: 1, + 10: 2, + 5: 2, + 3: 1, + 21: 2, + 19: 1, + 62: 1, + 16: 0, + 11: 3, + 51: 1, + 44: 1, + 61: 0, + 58: 1, + 59: 1, + 60: 0, + 63: 1, + 14: 0, + 15: 0, + 30: 0, + 25: 0, + 24: 1, + 31: 0, + 29: 0, + 42: 0, + 56: 0}, + 50:{ + 28: 1, + 40: 0, + 54: 0, + 45: 0, + 32: 1, + 50: 1, + 49: 0, + 38: 1, + 39: 1, + 53: 1, + 36: 1, + 41: 1, + 34: 1, + 35: 1, + 47: 1, + 46: 0, + 43: 1, + 33: 0, + 37: 1, + 57: 1, + 48: 0, + 55: 1, + 52: 0, + 2: 2, + 18: 0, + 26: 0, + 17: 0, + 1: 2, + 27: 1, + 12: 0, + 20: 0, + 9: 2, + 22: 1, + 7: 0, + 6: 1, + 13: 0, + 4: 0, + 8: 2, + 23: 0, + 10: 2, + 5: 0, + 3: 0, + 21: 1, + 19: 0, + 62: 0, + 16: 0, + 11: 
0, + 51: 1, + 44: 1, + 61: 0, + 58: 1, + 59: 1, + 60: 0, + 63: 1, + 14: 1, + 15: 1, + 30: 0, + 25: 0, + 24: 2, + 31: 1, + 29: 1, + 42: 1, + 56: 1}, + 49:{ + 28: 2, + 40: 1, + 54: 1, + 45: 1, + 32: 2, + 50: 1, + 49: 1, + 38: 1, + 39: 1, + 53: 1, + 36: 1, + 41: 1, + 34: 1, + 35: 1, + 47: 1, + 46: 1, + 43: 1, + 33: 1, + 37: 1, + 57: 1, + 48: 1, + 55: 2, + 52: 1, + 2: 2, + 18: 0, + 26: 0, + 17: 0, + 1: 2, + 27: 0, + 12: 0, + 20: 0, + 9: 1, + 22: 0, + 7: 0, + 6: 1, + 13: 0, + 4: 0, + 8: 2, + 23: 0, + 10: 2, + 5: 0, + 3: 0, + 21: 1, + 19: 0, + 62: 0, + 16: 2, + 11: 0, + 51: 1, + 44: 1, + 61: 1, + 58: 1, + 59: 1, + 60: 1, + 63: 1, + 14: 1, + 15: 1, + 30: 0, + 25: 1, + 24: 1, + 31: 1, + 29: 1, + 42: 1, + 56: 0}, + 38:{ + 28: 2, + 40: 1, + 54: 1, + 45: 0, + 32: 1, + 50: 0, + 49: 0, + 38: 0, + 39: 1, + 53: 0, + 36: 0, + 41: 1, + 34: 0, + 35: 0, + 47: 1, + 46: 0, + 43: 1, + 33: 1, + 37: 1, + 57: 1, + 48: 0, + 55: 1, + 52: 0, + 2: 3, + 18: 0, + 26: 0, + 17: 0, + 1: 2, + 27: 0, + 12: 0, + 20: 0, + 9: 2, + 22: 1, + 7: 0, + 6: 1, + 13: 1, + 4: 0, + 8: 3, + 23: 0, + 10: 1, + 5: 0, + 3: 0, + 21: 2, + 19: 0, + 62: 0, + 16: 1, + 11: 0, + 51: 2, + 44: 2, + 61: 1, + 58: 1, + 59: 1, + 60: 1, + 63: 1, + 14: 2, + 15: 1, + 30: 2, + 25: 1, + 24: 1, + 31: 1, + 29: 1, + 42: 1, + 56: 1}, + 39:{ + 28: 2, + 40: 1, + 54: 1, + 45: 1, + 32: 1, + 50: 1, + 49: 1, + 38: 1, + 39: 2, + 53: 1, + 36: 2, + 41: 2, + 34: 1, + 35: 2, + 47: 1, + 46: 1, + 43: 1, + 33: 2, + 37: 1, + 57: 1, + 48: 1, + 55: 0, + 52: 2, + 2: 0, + 18: 1, + 26: 1, + 17: 2, + 1: 0, + 27: 1, + 12: 2, + 20: 1, + 9: 0, + 22: 1, + 7: 1, + 6: 2, + 13: 2, + 4: 1, + 8: 0, + 23: 1, + 10: 2, + 5: 2, + 3: 2, + 21: 0, + 19: 1, + 62: 0, + 16: 0, + 11: 1, + 51: 1, + 44: 1, + 61: 0, + 58: 1, + 59: 1, + 60: 1, + 63: 1, + 14: 0, + 15: 0, + 30: 0, + 25: 0, + 24: 0, + 31: 0, + 29: 0, + 42: 0, + 56: 0}, + 53:{ + 28: 2, + 40: 0, + 54: 1, + 45: 1, + 32: 2, + 50: 0, + 49: 0, + 38: 1, + 39: 1, + 53: 1, + 36: 1, + 41: 1, + 34: 1, + 35: 1, + 47: 1, + 46: 0, + 
43: 0, + 33: 1, + 37: 1, + 57: 1, + 48: 0, + 55: 0, + 52: 1, + 2: 2, + 18: 0, + 26: 0, + 17: 0, + 1: 2, + 27: 0, + 12: 0, + 20: 0, + 9: 1, + 22: 0, + 7: 0, + 6: 0, + 13: 0, + 4: 0, + 8: 1, + 23: 0, + 10: 0, + 5: 0, + 3: 0, + 21: 2, + 19: 0, + 62: 0, + 16: 0, + 11: 0, + 51: 1, + 44: 1, + 61: 0, + 58: 1, + 59: 1, + 60: 1, + 63: 1, + 14: 2, + 15: 1, + 30: 0, + 25: 2, + 24: 2, + 31: 1, + 29: 0, + 42: 1, + 56: 0}, + 36:{ + 28: 2, + 40: 1, + 54: 1, + 45: 1, + 32: 2, + 50: 1, + 49: 0, + 38: 1, + 39: 2, + 53: 1, + 36: 1, + 41: 1, + 34: 1, + 35: 1, + 47: 2, + 46: 0, + 43: 1, + 33: 1, + 37: 1, + 57: 1, + 48: 1, + 55: 1, + 52: 0, + 2: 2, + 18: 0, + 26: 0, + 17: 0, + 1: 2, + 27: 1, + 12: 0, + 20: 1, + 9: 3, + 22: 0, + 7: 0, + 6: 1, + 13: 1, + 4: 1, + 8: 2, + 23: 0, + 10: 2, + 5: 0, + 3: 0, + 21: 1, + 19: 1, + 62: 0, + 16: 1, + 11: 0, + 51: 1, + 44: 1, + 61: 1, + 58: 1, + 59: 2, + 60: 1, + 63: 1, + 14: 2, + 15: 2, + 30: 1, + 25: 1, + 24: 2, + 31: 1, + 29: 2, + 42: 1, + 56: 0}, + 41:{ + 28: 2, + 40: 1, + 54: 1, + 45: 1, + 32: 2, + 50: 1, + 49: 1, + 38: 1, + 39: 2, + 53: 1, + 36: 1, + 41: 2, + 34: 1, + 35: 1, + 47: 2, + 46: 0, + 43: 1, + 33: 1, + 37: 2, + 57: 1, + 48: 1, + 55: 1, + 52: 1, + 2: 2, + 18: 0, + 26: 0, + 17: 0, + 1: 3, + 27: 0, + 12: 0, + 20: 0, + 9: 2, + 22: 1, + 7: 0, + 6: 1, + 13: 0, + 4: 0, + 8: 2, + 23: 0, + 10: 0, + 5: 0, + 3: 0, + 21: 2, + 19: 0, + 62: 0, + 16: 1, + 11: 0, + 51: 2, + 44: 1, + 61: 1, + 58: 1, + 59: 1, + 60: 1, + 63: 1, + 14: 2, + 15: 1, + 30: 1, + 25: 1, + 24: 1, + 31: 0, + 29: 1, + 42: 0, + 56: 0}, + 34:{ + 28: 2, + 40: 1, + 54: 0, + 45: 0, + 32: 2, + 50: 1, + 49: 0, + 38: 1, + 39: 2, + 53: 1, + 36: 1, + 41: 1, + 34: 1, + 35: 1, + 47: 1, + 46: 1, + 43: 1, + 33: 1, + 37: 1, + 57: 1, + 48: 1, + 55: 1, + 52: 1, + 2: 3, + 18: 0, + 26: 1, + 17: 0, + 1: 3, + 27: 0, + 12: 0, + 20: 0, + 9: 3, + 22: 0, + 7: 0, + 6: 0, + 13: 1, + 4: 1, + 8: 3, + 23: 0, + 10: 1, + 5: 0, + 3: 0, + 21: 2, + 19: 0, + 62: 0, + 16: 1, + 11: 0, + 51: 2, + 44: 1, + 61: 1, + 58: 
1, + 59: 1, + 60: 1, + 63: 1, + 14: 2, + 15: 2, + 30: 1, + 25: 1, + 24: 1, + 31: 1, + 29: 1, + 42: 0, + 56: 1}, + 35:{ + 28: 2, + 40: 1, + 54: 1, + 45: 2, + 32: 2, + 50: 1, + 49: 1, + 38: 1, + 39: 1, + 53: 1, + 36: 1, + 41: 1, + 34: 1, + 35: 1, + 47: 1, + 46: 1, + 43: 1, + 33: 1, + 37: 2, + 57: 1, + 48: 1, + 55: 2, + 52: 1, + 2: 3, + 18: 0, + 26: 0, + 17: 0, + 1: 3, + 27: 0, + 12: 0, + 20: 0, + 9: 2, + 22: 0, + 7: 0, + 6: 0, + 13: 0, + 4: 1, + 8: 2, + 23: 0, + 10: 0, + 5: 0, + 3: 0, + 21: 1, + 19: 0, + 62: 0, + 16: 2, + 11: 0, + 51: 1, + 44: 1, + 61: 1, + 58: 1, + 59: 1, + 60: 1, + 63: 1, + 14: 1, + 15: 2, + 30: 1, + 25: 1, + 24: 1, + 31: 0, + 29: 0, + 42: 1, + 56: 0}, + 47:{ + 28: 1, + 40: 1, + 54: 1, + 45: 1, + 32: 1, + 50: 1, + 49: 1, + 38: 1, + 39: 1, + 53: 1, + 36: 2, + 41: 2, + 34: 2, + 35: 2, + 47: 1, + 46: 1, + 43: 2, + 33: 2, + 37: 2, + 57: 1, + 48: 1, + 55: 1, + 52: 1, + 2: 0, + 18: 1, + 26: 1, + 17: 1, + 1: 1, + 27: 1, + 12: 1, + 20: 1, + 9: 1, + 22: 1, + 7: 2, + 6: 2, + 13: 1, + 4: 1, + 8: 1, + 23: 1, + 10: 2, + 5: 1, + 3: 2, + 21: 1, + 19: 0, + 62: 1, + 16: 0, + 11: 1, + 51: 1, + 44: 1, + 61: 0, + 58: 1, + 59: 0, + 60: 0, + 63: 0, + 14: 0, + 15: 0, + 30: 0, + 25: 0, + 24: 0, + 31: 0, + 29: 0, + 42: 0, + 56: 0}, + 46:{ + 28: 1, + 40: 1, + 54: 1, + 45: 1, + 32: 1, + 50: 1, + 49: 1, + 38: 1, + 39: 1, + 53: 1, + 36: 1, + 41: 1, + 34: 0, + 35: 1, + 47: 1, + 46: 1, + 43: 2, + 33: 1, + 37: 1, + 57: 1, + 48: 1, + 55: 0, + 52: 1, + 2: 2, + 18: 0, + 26: 0, + 17: 0, + 1: 2, + 27: 1, + 12: 0, + 20: 1, + 9: 2, + 22: 0, + 7: 0, + 6: 1, + 13: 0, + 4: 1, + 8: 2, + 23: 0, + 10: 2, + 5: 1, + 3: 0, + 21: 1, + 19: 0, + 62: 0, + 16: 1, + 11: 0, + 51: 2, + 44: 1, + 61: 1, + 58: 1, + 59: 1, + 60: 0, + 63: 1, + 14: 3, + 15: 2, + 30: 0, + 25: 1, + 24: 1, + 31: 0, + 29: 1, + 42: 1, + 56: 0}, + 43:{ + 28: 2, + 40: 1, + 54: 1, + 45: 1, + 32: 2, + 50: 1, + 49: 1, + 38: 1, + 39: 2, + 53: 1, + 36: 1, + 41: 1, + 34: 1, + 35: 1, + 47: 2, + 46: 1, + 43: 1, + 33: 2, + 37: 2, + 57: 1, + 
48: 1, + 55: 1, + 52: 1, + 2: 2, + 18: 0, + 26: 0, + 17: 0, + 1: 2, + 27: 0, + 12: 0, + 20: 1, + 9: 2, + 22: 0, + 7: 0, + 6: 0, + 13: 0, + 4: 0, + 8: 2, + 23: 0, + 10: 0, + 5: 0, + 3: 0, + 21: 1, + 19: 0, + 62: 0, + 16: 1, + 11: 0, + 51: 2, + 44: 1, + 61: 1, + 58: 2, + 59: 1, + 60: 1, + 63: 1, + 14: 2, + 15: 2, + 30: 1, + 25: 2, + 24: 1, + 31: 1, + 29: 1, + 42: 0, + 56: 0}, + 33:{ + 28: 2, + 40: 1, + 54: 1, + 45: 1, + 32: 2, + 50: 1, + 49: 1, + 38: 1, + 39: 2, + 53: 1, + 36: 1, + 41: 1, + 34: 1, + 35: 1, + 47: 2, + 46: 1, + 43: 1, + 33: 2, + 37: 2, + 57: 1, + 48: 1, + 55: 1, + 52: 3, + 2: 2, + 18: 0, + 26: 1, + 17: 0, + 1: 2, + 27: 0, + 12: 0, + 20: 1, + 9: 2, + 22: 0, + 7: 1, + 6: 1, + 13: 1, + 4: 0, + 8: 2, + 23: 1, + 10: 0, + 5: 0, + 3: 1, + 21: 1, + 19: 1, + 62: 0, + 16: 1, + 11: 3, + 51: 2, + 44: 1, + 61: 1, + 58: 1, + 59: 1, + 60: 1, + 63: 1, + 14: 2, + 15: 1, + 30: 1, + 25: 1, + 24: 1, + 31: 1, + 29: 1, + 42: 1, + 56: 1}, + 37:{ + 28: 2, + 40: 1, + 54: 1, + 45: 1, + 32: 2, + 50: 1, + 49: 1, + 38: 1, + 39: 2, + 53: 1, + 36: 1, + 41: 1, + 34: 1, + 35: 1, + 47: 2, + 46: 1, + 43: 2, + 33: 1, + 37: 2, + 57: 1, + 48: 1, + 55: 1, + 52: 1, + 2: 2, + 18: 0, + 26: 0, + 17: 0, + 1: 2, + 27: 0, + 12: 0, + 20: 1, + 9: 2, + 22: 0, + 7: 0, + 6: 0, + 13: 0, + 4: 0, + 8: 2, + 23: 0, + 10: 1, + 5: 1, + 3: 0, + 21: 2, + 19: 0, + 62: 0, + 16: 1, + 11: 1, + 51: 2, + 44: 2, + 61: 1, + 58: 1, + 59: 1, + 60: 1, + 63: 1, + 14: 2, + 15: 1, + 30: 1, + 25: 1, + 24: 2, + 31: 1, + 29: 1, + 42: 1, + 56: 1}, + 57:{ + 28: 1, + 40: 1, + 54: 1, + 45: 1, + 32: 1, + 50: 1, + 49: 1, + 38: 1, + 39: 1, + 53: 1, + 36: 1, + 41: 1, + 34: 1, + 35: 1, + 47: 1, + 46: 1, + 43: 1, + 33: 2, + 37: 1, + 57: 0, + 48: 1, + 55: 0, + 52: 1, + 2: 0, + 18: 1, + 26: 1, + 17: 1, + 1: 1, + 27: 0, + 12: 2, + 20: 0, + 9: 0, + 22: 1, + 7: 1, + 6: 1, + 13: 1, + 4: 1, + 8: 0, + 23: 1, + 10: 1, + 5: 1, + 3: 1, + 21: 0, + 19: 0, + 62: 0, + 16: 0, + 11: 1, + 51: 0, + 44: 0, + 61: 1, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 
0, + 15: 0, + 30: 0, + 25: 0, + 24: 0, + 31: 0, + 29: 0, + 42: 0, + 56: 0}, + 48:{ + 28: 2, + 40: 0, + 54: 0, + 45: 1, + 32: 2, + 50: 1, + 49: 0, + 38: 0, + 39: 2, + 53: 1, + 36: 1, + 41: 0, + 34: 1, + 35: 1, + 47: 1, + 46: 1, + 43: 1, + 33: 1, + 37: 1, + 57: 1, + 48: 1, + 55: 1, + 52: 0, + 2: 3, + 18: 0, + 26: 0, + 17: 0, + 1: 2, + 27: 0, + 12: 0, + 20: 0, + 9: 2, + 22: 0, + 7: 0, + 6: 1, + 13: 0, + 4: 0, + 8: 2, + 23: 0, + 10: 0, + 5: 0, + 3: 0, + 21: 1, + 19: 0, + 62: 0, + 16: 0, + 11: 0, + 51: 2, + 44: 2, + 61: 1, + 58: 1, + 59: 1, + 60: 0, + 63: 1, + 14: 2, + 15: 2, + 30: 1, + 25: 0, + 24: 1, + 31: 0, + 29: 0, + 42: 0, + 56: 0}, + 55:{ + 28: 2, + 40: 1, + 54: 1, + 45: 1, + 32: 2, + 50: 1, + 49: 1, + 38: 1, + 39: 1, + 53: 1, + 36: 1, + 41: 1, + 34: 1, + 35: 1, + 47: 1, + 46: 1, + 43: 1, + 33: 1, + 37: 1, + 57: 1, + 48: 1, + 55: 0, + 52: 2, + 2: 1, + 18: 0, + 26: 0, + 17: 1, + 1: 1, + 27: 0, + 12: 0, + 20: 0, + 9: 0, + 22: 0, + 7: 0, + 6: 0, + 13: 0, + 4: 0, + 8: 1, + 23: 1, + 10: 0, + 5: 0, + 3: 0, + 21: 0, + 19: 1, + 62: 0, + 16: 0, + 11: 0, + 51: 1, + 44: 1, + 61: 1, + 58: 1, + 59: 1, + 60: 1, + 63: 1, + 14: 0, + 15: 0, + 30: 0, + 25: 0, + 24: 0, + 31: 0, + 29: 0, + 42: 0, + 56: 0}, + 52:{ + 28: 2, + 40: 1, + 54: 0, + 45: 1, + 32: 2, + 50: 1, + 49: 1, + 38: 1, + 39: 2, + 53: 1, + 36: 1, + 41: 1, + 34: 1, + 35: 1, + 47: 2, + 46: 1, + 43: 1, + 33: 2, + 37: 1, + 57: 1, + 48: 1, + 55: 1, + 52: 1, + 2: 1, + 18: 0, + 26: 0, + 17: 0, + 1: 1, + 27: 0, + 12: 0, + 20: 0, + 9: 1, + 22: 0, + 7: 0, + 6: 0, + 13: 0, + 4: 1, + 8: 1, + 23: 0, + 10: 1, + 5: 2, + 3: 0, + 21: 1, + 19: 0, + 62: 0, + 16: 0, + 11: 0, + 51: 2, + 44: 1, + 61: 1, + 58: 1, + 59: 1, + 60: 1, + 63: 1, + 14: 1, + 15: 1, + 30: 0, + 25: 0, + 24: 1, + 31: 1, + 29: 1, + 42: 0, + 56: 0}, + 2:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 1, + 
18: 3, + 26: 3, + 17: 3, + 1: 2, + 27: 2, + 12: 3, + 20: 3, + 9: 3, + 22: 3, + 7: 3, + 6: 3, + 13: 3, + 4: 3, + 8: 2, + 23: 3, + 10: 3, + 5: 3, + 3: 3, + 21: 3, + 19: 3, + 62: 1, + 16: 2, + 11: 3, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 1, + 15: 1, + 30: 1, + 25: 1, + 24: 1, + 31: 1, + 29: 1, + 42: 0, + 56: 0}, + 18:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 3, + 18: 3, + 26: 1, + 17: 1, + 1: 3, + 27: 1, + 12: 1, + 20: 1, + 9: 3, + 22: 2, + 7: 2, + 6: 2, + 13: 1, + 4: 2, + 8: 3, + 23: 1, + 10: 3, + 5: 2, + 3: 1, + 21: 3, + 19: 1, + 62: 0, + 16: 1, + 11: 1, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 3, + 15: 3, + 30: 2, + 25: 3, + 24: 2, + 31: 2, + 29: 2, + 42: 2, + 56: 1}, + 26:{ + 28: 0, + 40: 0, + 54: 1, + 45: 0, + 32: 0, + 50: 0, + 49: 1, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 2, + 18: 1, + 26: 2, + 17: 1, + 1: 3, + 27: 1, + 12: 1, + 20: 3, + 9: 3, + 22: 1, + 7: 2, + 6: 1, + 13: 1, + 4: 1, + 8: 3, + 23: 1, + 10: 2, + 5: 3, + 3: 2, + 21: 2, + 19: 1, + 62: 0, + 16: 1, + 11: 2, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 2, + 15: 2, + 30: 2, + 25: 1, + 24: 1, + 31: 1, + 29: 1, + 42: 0, + 56: 0}, + 17:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 3, + 18: 2, + 26: 1, + 17: 2, + 1: 3, + 27: 1, + 12: 1, + 20: 2, + 9: 3, + 22: 3, + 7: 2, + 6: 1, + 13: 2, + 4: 3, + 8: 3, + 23: 1, + 10: 3, + 5: 3, + 3: 3, + 21: 3, + 19: 3, + 62: 0, + 16: 2, + 11: 2, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 3, + 15: 3, + 30: 3, + 25: 3, + 24: 
3, + 31: 2, + 29: 2, + 42: 2, + 56: 1}, + 1:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 2, + 18: 3, + 26: 3, + 17: 3, + 1: 2, + 27: 3, + 12: 3, + 20: 3, + 9: 3, + 22: 3, + 7: 3, + 6: 3, + 13: 3, + 4: 3, + 8: 2, + 23: 3, + 10: 3, + 5: 3, + 3: 3, + 21: 2, + 19: 3, + 62: 2, + 16: 2, + 11: 3, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 3, + 15: 1, + 30: 1, + 25: 1, + 24: 1, + 31: 1, + 29: 1, + 42: 0, + 56: 0}, + 27:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 3, + 18: 1, + 26: 1, + 17: 1, + 1: 3, + 27: 2, + 12: 1, + 20: 1, + 9: 3, + 22: 2, + 7: 1, + 6: 1, + 13: 1, + 4: 1, + 8: 3, + 23: 0, + 10: 3, + 5: 1, + 3: 1, + 21: 2, + 19: 1, + 62: 0, + 16: 1, + 11: 0, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 3, + 15: 3, + 30: 1, + 25: 1, + 24: 3, + 31: 1, + 29: 2, + 42: 1, + 56: 1}, + 12:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 3, + 18: 3, + 26: 2, + 17: 2, + 1: 3, + 27: 2, + 12: 3, + 20: 3, + 9: 3, + 22: 3, + 7: 2, + 6: 3, + 13: 2, + 4: 3, + 8: 3, + 23: 1, + 10: 3, + 5: 3, + 3: 3, + 21: 3, + 19: 3, + 62: 0, + 16: 3, + 11: 2, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 3, + 15: 3, + 30: 2, + 25: 3, + 24: 2, + 31: 2, + 29: 2, + 42: 2, + 56: 1}, + 20:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 3, + 18: 1, + 26: 1, + 17: 0, + 1: 3, + 27: 
0, + 12: 1, + 20: 2, + 9: 3, + 22: 1, + 7: 1, + 6: 1, + 13: 1, + 4: 1, + 8: 3, + 23: 0, + 10: 1, + 5: 2, + 3: 1, + 21: 3, + 19: 1, + 62: 0, + 16: 2, + 11: 0, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 3, + 15: 3, + 30: 3, + 25: 2, + 24: 2, + 31: 2, + 29: 1, + 42: 1, + 56: 1}, + 9:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 3, + 18: 3, + 26: 3, + 17: 3, + 1: 3, + 27: 3, + 12: 3, + 20: 3, + 9: 2, + 22: 2, + 7: 3, + 6: 3, + 13: 3, + 4: 3, + 8: 2, + 23: 2, + 10: 3, + 5: 3, + 3: 3, + 21: 3, + 19: 3, + 62: 1, + 16: 1, + 11: 3, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 3, + 15: 2, + 30: 1, + 25: 3, + 24: 1, + 31: 2, + 29: 1, + 42: 0, + 56: 1}, + 22:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 3, + 18: 2, + 26: 1, + 17: 3, + 1: 3, + 27: 1, + 12: 1, + 20: 2, + 9: 1, + 22: 2, + 7: 2, + 6: 2, + 13: 1, + 4: 2, + 8: 3, + 23: 1, + 10: 2, + 5: 2, + 3: 3, + 21: 3, + 19: 1, + 62: 0, + 16: 0, + 11: 2, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 3, + 15: 3, + 30: 1, + 25: 3, + 24: 3, + 31: 3, + 29: 2, + 42: 1, + 56: 1}, + 7:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 3, + 18: 3, + 26: 2, + 17: 1, + 1: 3, + 27: 1, + 12: 1, + 20: 2, + 9: 3, + 22: 2, + 7: 3, + 6: 3, + 13: 1, + 4: 3, + 8: 3, + 23: 1, + 10: 3, + 5: 3, + 3: 3, + 21: 3, + 19: 2, + 62: 0, + 16: 2, + 11: 1, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 3, + 15: 3, + 30: 3, + 25: 2, + 24: 3, + 31: 1, + 29: 3, + 42: 1, + 56: 1}, 
+ 6:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 1, + 41: 0, + 34: 0, + 35: 1, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 3, + 18: 2, + 26: 3, + 17: 3, + 1: 3, + 27: 3, + 12: 3, + 20: 3, + 9: 3, + 22: 3, + 7: 3, + 6: 3, + 13: 3, + 4: 3, + 8: 3, + 23: 2, + 10: 2, + 5: 3, + 3: 3, + 21: 3, + 19: 3, + 62: 0, + 16: 3, + 11: 2, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 3, + 15: 3, + 30: 3, + 25: 3, + 24: 3, + 31: 2, + 29: 2, + 42: 3, + 56: 1}, + 13:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 3, + 18: 3, + 26: 2, + 17: 1, + 1: 3, + 27: 1, + 12: 1, + 20: 2, + 9: 3, + 22: 2, + 7: 1, + 6: 3, + 13: 3, + 4: 2, + 8: 3, + 23: 3, + 10: 2, + 5: 2, + 3: 2, + 21: 3, + 19: 1, + 62: 0, + 16: 1, + 11: 2, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 3, + 15: 3, + 30: 2, + 25: 2, + 24: 2, + 31: 2, + 29: 2, + 42: 1, + 56: 2}, + 4:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 3, + 18: 3, + 26: 3, + 17: 3, + 1: 3, + 27: 2, + 12: 3, + 20: 3, + 9: 3, + 22: 2, + 7: 3, + 6: 2, + 13: 2, + 4: 3, + 8: 3, + 23: 2, + 10: 2, + 5: 3, + 3: 3, + 21: 3, + 19: 2, + 62: 1, + 16: 3, + 11: 3, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 3, + 15: 3, + 30: 2, + 25: 2, + 24: 3, + 31: 2, + 29: 3, + 42: 2, + 56: 1}, + 8:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 1, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 2, + 18: 3, + 26: 3, + 17: 3, + 1: 2, + 27: 2, + 12: 3, + 20: 3, + 9: 2, + 22: 2, + 
7: 3, + 6: 3, + 13: 3, + 4: 3, + 8: 1, + 23: 3, + 10: 3, + 5: 3, + 3: 3, + 21: 2, + 19: 3, + 62: 1, + 16: 1, + 11: 3, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 1, + 15: 2, + 30: 1, + 25: 1, + 24: 1, + 31: 1, + 29: 1, + 42: 0, + 56: 0}, + 23:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 3, + 18: 1, + 26: 2, + 17: 1, + 1: 3, + 27: 1, + 12: 1, + 20: 2, + 9: 3, + 22: 2, + 7: 2, + 6: 3, + 13: 1, + 4: 2, + 8: 3, + 23: 3, + 10: 3, + 5: 2, + 3: 2, + 21: 3, + 19: 2, + 62: 0, + 16: 1, + 11: 2, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 3, + 15: 3, + 30: 2, + 25: 2, + 24: 2, + 31: 1, + 29: 2, + 42: 1, + 56: 1}, + 10:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 3, + 18: 3, + 26: 3, + 17: 3, + 1: 3, + 27: 2, + 12: 3, + 20: 2, + 9: 3, + 22: 3, + 7: 3, + 6: 3, + 13: 3, + 4: 3, + 8: 3, + 23: 2, + 10: 3, + 5: 3, + 3: 3, + 21: 3, + 19: 3, + 62: 1, + 16: 2, + 11: 3, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 3, + 15: 3, + 30: 2, + 25: 3, + 24: 3, + 31: 3, + 29: 3, + 42: 2, + 56: 2}, + 5:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 3, + 18: 3, + 26: 2, + 17: 2, + 1: 3, + 27: 2, + 12: 2, + 20: 2, + 9: 3, + 22: 1, + 7: 3, + 6: 2, + 13: 3, + 4: 3, + 8: 3, + 23: 2, + 10: 3, + 5: 3, + 3: 3, + 21: 3, + 19: 2, + 62: 0, + 16: 1, + 11: 3, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 3, + 15: 3, + 30: 3, + 25: 3, + 24: 3, + 31: 3, + 29: 3, + 42: 2, + 56: 1}, + 3:{ + 28: 0, + 40: 0, + 54: 0, + 45: 
0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 3, + 18: 3, + 26: 2, + 17: 1, + 1: 3, + 27: 2, + 12: 1, + 20: 3, + 9: 3, + 22: 3, + 7: 3, + 6: 3, + 13: 2, + 4: 3, + 8: 3, + 23: 1, + 10: 3, + 5: 3, + 3: 3, + 21: 3, + 19: 3, + 62: 0, + 16: 3, + 11: 1, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 3, + 15: 3, + 30: 2, + 25: 3, + 24: 3, + 31: 3, + 29: 3, + 42: 3, + 56: 2}, + 21:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 1, + 18: 2, + 26: 2, + 17: 3, + 1: 2, + 27: 1, + 12: 3, + 20: 2, + 9: 2, + 22: 2, + 7: 3, + 6: 3, + 13: 3, + 4: 3, + 8: 1, + 23: 2, + 10: 3, + 5: 3, + 3: 3, + 21: 1, + 19: 3, + 62: 1, + 16: 1, + 11: 2, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 2, + 15: 1, + 30: 1, + 25: 1, + 24: 0, + 31: 1, + 29: 0, + 42: 0, + 56: 0}, + 19:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 3, + 18: 2, + 26: 1, + 17: 1, + 1: 3, + 27: 1, + 12: 1, + 20: 1, + 9: 3, + 22: 1, + 7: 1, + 6: 1, + 13: 1, + 4: 1, + 8: 3, + 23: 1, + 10: 1, + 5: 2, + 3: 2, + 21: 2, + 19: 2, + 62: 0, + 16: 1, + 11: 1, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 3, + 15: 3, + 30: 2, + 25: 2, + 24: 2, + 31: 1, + 29: 2, + 42: 1, + 56: 1}, + 62:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 1, + 18: 1, + 26: 1, + 17: 0, + 1: 1, + 27: 1, + 12: 0, + 20: 0, + 9: 1, + 22: 0, + 7: 1, + 6: 1, + 13: 1, + 4: 1, + 8: 1, 
+ 23: 1, + 10: 1, + 5: 1, + 3: 1, + 21: 1, + 19: 0, + 62: 0, + 16: 0, + 11: 0, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 1, + 15: 1, + 30: 1, + 25: 1, + 24: 0, + 31: 0, + 29: 0, + 42: 0, + 56: 0}, + 16:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 3, + 18: 2, + 26: 1, + 17: 1, + 1: 3, + 27: 2, + 12: 2, + 20: 2, + 9: 3, + 22: 2, + 7: 2, + 6: 2, + 13: 2, + 4: 3, + 8: 3, + 23: 2, + 10: 2, + 5: 3, + 3: 3, + 21: 3, + 19: 3, + 62: 0, + 16: 0, + 11: 2, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 3, + 15: 3, + 30: 2, + 25: 2, + 24: 3, + 31: 2, + 29: 2, + 42: 1, + 56: 2}, + 11:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 3, + 18: 2, + 26: 1, + 17: 3, + 1: 3, + 27: 1, + 12: 2, + 20: 2, + 9: 3, + 22: 1, + 7: 3, + 6: 2, + 13: 3, + 4: 3, + 8: 3, + 23: 1, + 10: 2, + 5: 3, + 3: 3, + 21: 3, + 19: 2, + 62: 0, + 16: 1, + 11: 3, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 3, + 15: 3, + 30: 3, + 25: 3, + 24: 3, + 31: 2, + 29: 3, + 42: 2, + 56: 1}, + 51:{ + 28: 0, + 40: 1, + 54: 1, + 45: 1, + 32: 0, + 50: 1, + 49: 2, + 38: 1, + 39: 1, + 53: 1, + 36: 1, + 41: 2, + 34: 1, + 35: 2, + 47: 0, + 46: 1, + 43: 2, + 33: 2, + 37: 1, + 57: 0, + 48: 1, + 55: 0, + 52: 1, + 2: 0, + 18: 1, + 26: 1, + 17: 1, + 1: 0, + 27: 0, + 12: 1, + 20: 1, + 9: 0, + 22: 1, + 7: 1, + 6: 2, + 13: 2, + 4: 0, + 8: 0, + 23: 1, + 10: 1, + 5: 1, + 3: 1, + 21: 0, + 19: 0, + 62: 0, + 16: 0, + 11: 1, + 51: 0, + 44: 0, + 61: 1, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 0, + 15: 0, + 30: 0, + 25: 0, + 24: 0, + 31: 0, + 29: 0, + 42: 0, + 56: 0}, + 44:{ + 28: 0, + 40: 1, + 54: 1, + 45: 1, + 32: 1, + 50: 0, + 49: 2, + 38: 
1, + 39: 1, + 53: 1, + 36: 1, + 41: 2, + 34: 1, + 35: 2, + 47: 0, + 46: 1, + 43: 2, + 33: 2, + 37: 2, + 57: 0, + 48: 1, + 55: 0, + 52: 1, + 2: 0, + 18: 1, + 26: 1, + 17: 1, + 1: 0, + 27: 0, + 12: 1, + 20: 1, + 9: 0, + 22: 1, + 7: 1, + 6: 2, + 13: 1, + 4: 2, + 8: 0, + 23: 1, + 10: 2, + 5: 3, + 3: 1, + 21: 0, + 19: 1, + 62: 0, + 16: 0, + 11: 0, + 51: 0, + 44: 1, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 0, + 15: 0, + 30: 0, + 25: 0, + 24: 0, + 31: 0, + 29: 0, + 42: 0, + 56: 0}, + 61:{ + 28: 0, + 40: 1, + 54: 1, + 45: 1, + 32: 0, + 50: 1, + 49: 1, + 38: 0, + 39: 0, + 53: 1, + 36: 0, + 41: 1, + 34: 1, + 35: 1, + 47: 0, + 46: 1, + 43: 1, + 33: 1, + 37: 1, + 57: 0, + 48: 1, + 55: 0, + 52: 1, + 2: 0, + 18: 0, + 26: 0, + 17: 0, + 1: 0, + 27: 0, + 12: 2, + 20: 0, + 9: 0, + 22: 0, + 7: 0, + 6: 0, + 13: 1, + 4: 0, + 8: 0, + 23: 0, + 10: 1, + 5: 0, + 3: 1, + 21: 0, + 19: 0, + 62: 0, + 16: 0, + 11: 1, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 0, + 15: 0, + 30: 0, + 25: 0, + 24: 0, + 31: 0, + 29: 0, + 42: 0, + 56: 0}, + 58:{ + 28: 1, + 40: 1, + 54: 1, + 45: 1, + 32: 0, + 50: 1, + 49: 1, + 38: 1, + 39: 1, + 53: 1, + 36: 1, + 41: 2, + 34: 1, + 35: 1, + 47: 0, + 46: 1, + 43: 1, + 33: 1, + 37: 1, + 57: 0, + 48: 1, + 55: 0, + 52: 1, + 2: 0, + 18: 1, + 26: 1, + 17: 1, + 1: 0, + 27: 0, + 12: 0, + 20: 2, + 9: 0, + 22: 0, + 7: 1, + 6: 1, + 13: 0, + 4: 1, + 8: 0, + 23: 1, + 10: 1, + 5: 1, + 3: 0, + 21: 0, + 19: 1, + 62: 0, + 16: 0, + 11: 1, + 51: 0, + 44: 1, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 0, + 15: 0, + 30: 0, + 25: 0, + 24: 0, + 31: 0, + 29: 0, + 42: 0, + 56: 0}, + 59:{ + 28: 0, + 40: 1, + 54: 1, + 45: 1, + 32: 0, + 50: 0, + 49: 1, + 38: 1, + 39: 0, + 53: 1, + 36: 1, + 41: 1, + 34: 1, + 35: 1, + 47: 0, + 46: 1, + 43: 1, + 33: 1, + 37: 1, + 57: 0, + 48: 1, + 55: 0, + 52: 1, + 2: 0, + 18: 0, + 26: 1, + 17: 1, + 1: 0, + 27: 0, + 12: 0, + 20: 0, + 9: 0, + 22: 0, + 7: 1, + 6: 1, + 13: 1, + 4: 1, + 8: 0, + 23: 0, + 10: 2, + 5: 1, + 3: 1, + 
21: 0, + 19: 1, + 62: 0, + 16: 0, + 11: 1, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 0, + 15: 0, + 30: 0, + 25: 0, + 24: 0, + 31: 0, + 29: 0, + 42: 0, + 56: 0}, + 60:{ + 28: 0, + 40: 1, + 54: 1, + 45: 1, + 32: 0, + 50: 1, + 49: 1, + 38: 0, + 39: 0, + 53: 1, + 36: 1, + 41: 1, + 34: 1, + 35: 1, + 47: 0, + 46: 0, + 43: 1, + 33: 1, + 37: 1, + 57: 0, + 48: 1, + 55: 0, + 52: 1, + 2: 0, + 18: 0, + 26: 0, + 17: 0, + 1: 0, + 27: 0, + 12: 2, + 20: 0, + 9: 0, + 22: 2, + 7: 0, + 6: 0, + 13: 0, + 4: 1, + 8: 0, + 23: 0, + 10: 1, + 5: 1, + 3: 1, + 21: 0, + 19: 0, + 62: 0, + 16: 0, + 11: 0, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 0, + 15: 0, + 30: 0, + 25: 0, + 24: 0, + 31: 0, + 29: 0, + 42: 0, + 56: 0}, + 63:{ + 28: 0, + 40: 1, + 54: 0, + 45: 1, + 32: 0, + 50: 0, + 49: 1, + 38: 1, + 39: 0, + 53: 1, + 36: 1, + 41: 1, + 34: 1, + 35: 1, + 47: 0, + 46: 0, + 43: 1, + 33: 1, + 37: 1, + 57: 0, + 48: 1, + 55: 0, + 52: 1, + 2: 0, + 18: 1, + 26: 0, + 17: 1, + 1: 0, + 27: 0, + 12: 1, + 20: 0, + 9: 0, + 22: 0, + 7: 0, + 6: 1, + 13: 0, + 4: 1, + 8: 0, + 23: 0, + 10: 1, + 5: 1, + 3: 1, + 21: 0, + 19: 1, + 62: 0, + 16: 0, + 11: 1, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 0, + 15: 0, + 30: 0, + 25: 0, + 24: 0, + 31: 0, + 29: 0, + 42: 0, + 56: 0}, + 14:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 1, + 18: 3, + 26: 3, + 17: 3, + 1: 1, + 27: 2, + 12: 3, + 20: 2, + 9: 2, + 22: 3, + 7: 3, + 6: 3, + 13: 3, + 4: 3, + 8: 1, + 23: 2, + 10: 3, + 5: 3, + 3: 3, + 21: 2, + 19: 3, + 62: 0, + 16: 1, + 11: 3, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 1, + 15: 2, + 30: 1, + 25: 0, + 24: 1, + 31: 0, + 29: 1, + 42: 0, + 56: 0}, + 15:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 
0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 1, + 18: 3, + 26: 2, + 17: 3, + 1: 1, + 27: 1, + 12: 3, + 20: 3, + 9: 2, + 22: 2, + 7: 3, + 6: 3, + 13: 3, + 4: 3, + 8: 1, + 23: 3, + 10: 3, + 5: 3, + 3: 3, + 21: 0, + 19: 3, + 62: 0, + 16: 0, + 11: 3, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 1, + 15: 1, + 30: 0, + 25: 0, + 24: 0, + 31: 0, + 29: 1, + 42: 0, + 56: 0}, + 30:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 0, + 18: 1, + 26: 2, + 17: 1, + 1: 0, + 27: 1, + 12: 3, + 20: 0, + 9: 0, + 22: 1, + 7: 1, + 6: 2, + 13: 2, + 4: 3, + 8: 0, + 23: 1, + 10: 3, + 5: 2, + 3: 3, + 21: 0, + 19: 3, + 62: 0, + 16: 0, + 11: 2, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 0, + 15: 0, + 30: 0, + 25: 0, + 24: 0, + 31: 0, + 29: 0, + 42: 0, + 56: 0}, + 25:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 2, + 18: 3, + 26: 2, + 17: 3, + 1: 1, + 27: 2, + 12: 2, + 20: 2, + 9: 2, + 22: 2, + 7: 3, + 6: 3, + 13: 2, + 4: 3, + 8: 1, + 23: 2, + 10: 3, + 5: 3, + 3: 3, + 21: 1, + 19: 2, + 62: 0, + 16: 0, + 11: 3, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 1, + 15: 1, + 30: 1, + 25: 0, + 24: 1, + 31: 1, + 29: 1, + 42: 0, + 56: 0}, + 24:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 0, + 18: 3, + 26: 1, + 17: 2, + 1: 0, + 27: 1, + 12: 2, + 20: 1, + 9: 0, + 22: 1, + 7: 3, + 6: 3, + 13: 3, + 4: 3, + 8: 0, + 23: 2, + 10: 3, + 5: 3, + 3: 3, + 21: 0, + 19: 3, + 62: 0, + 16: 0, + 
11: 3, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 0, + 15: 0, + 30: 0, + 25: 0, + 24: 0, + 31: 0, + 29: 0, + 42: 0, + 56: 0}, + 31:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 1, + 18: 1, + 26: 2, + 17: 1, + 1: 1, + 27: 2, + 12: 3, + 20: 1, + 9: 1, + 22: 3, + 7: 1, + 6: 3, + 13: 1, + 4: 2, + 8: 0, + 23: 1, + 10: 3, + 5: 3, + 3: 2, + 21: 1, + 19: 1, + 62: 0, + 16: 0, + 11: 2, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 1, + 15: 1, + 30: 0, + 25: 0, + 24: 0, + 31: 0, + 29: 0, + 42: 0, + 56: 0}, + 29:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 1, + 18: 1, + 26: 1, + 17: 2, + 1: 1, + 27: 1, + 12: 3, + 20: 2, + 9: 1, + 22: 1, + 7: 3, + 6: 3, + 13: 1, + 4: 3, + 8: 0, + 23: 1, + 10: 2, + 5: 2, + 3: 2, + 21: 0, + 19: 2, + 62: 0, + 16: 0, + 11: 2, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 0, + 15: 1, + 30: 0, + 25: 0, + 24: 0, + 31: 0, + 29: 0, + 42: 0, + 56: 0}, + 42:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 1, + 18: 2, + 26: 1, + 17: 2, + 1: 1, + 27: 1, + 12: 1, + 20: 1, + 9: 1, + 22: 1, + 7: 2, + 6: 3, + 13: 1, + 4: 2, + 8: 1, + 23: 1, + 10: 2, + 5: 2, + 3: 2, + 21: 1, + 19: 1, + 62: 0, + 16: 0, + 11: 2, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 0, + 15: 1, + 30: 1, + 25: 0, + 24: 0, + 31: 0, + 29: 1, + 42: 0, + 56: 0}, + 56:{ + 28: 0, + 40: 0, + 54: 0, + 45: 0, + 32: 0, + 50: 0, + 49: 0, + 38: 0, + 39: 0, + 53: 0, + 36: 0, + 41: 0, + 34: 0, + 35: 0, + 47: 0, + 46: 
0, + 43: 0, + 33: 0, + 37: 0, + 57: 0, + 48: 0, + 55: 0, + 52: 0, + 2: 1, + 18: 1, + 26: 0, + 17: 1, + 1: 1, + 27: 1, + 12: 1, + 20: 1, + 9: 1, + 22: 1, + 7: 1, + 6: 1, + 13: 0, + 4: 2, + 8: 0, + 23: 0, + 10: 1, + 5: 1, + 3: 1, + 21: 0, + 19: 1, + 62: 0, + 16: 0, + 11: 2, + 51: 0, + 44: 0, + 61: 0, + 58: 0, + 59: 0, + 60: 0, + 63: 0, + 14: 0, + 15: 0, + 30: 0, + 25: 0, + 24: 0, + 31: 0, + 29: 0, + 42: 0, + 56: 0}} +WINDOWS_1250_HUNGARIAN_CHAR_TO_ORDER = { + 0: 255, + 1: 255, + 2: 255, + 3: 255, + 4: 255, + 5: 255, + 6: 255, + 7: 255, + 8: 255, + 9: 255, + 10: 254, + 11: 255, + 12: 255, + 13: 254, + 14: 255, + 15: 255, + 16: 255, + 17: 255, + 18: 255, + 19: 255, + 20: 255, + 21: 255, + 22: 255, + 23: 255, + 24: 255, + 25: 255, + 26: 255, + 27: 255, + 28: 255, + 29: 255, + 30: 255, + 31: 255, + 32: 253, + 33: 253, + 34: 253, + 35: 253, + 36: 253, + 37: 253, + 38: 253, + 39: 253, + 40: 253, + 41: 253, + 42: 253, + 43: 253, + 44: 253, + 45: 253, + 46: 253, + 47: 253, + 48: 252, + 49: 252, + 50: 252, + 51: 252, + 52: 252, + 53: 252, + 54: 252, + 55: 252, + 56: 252, + 57: 252, + 58: 253, + 59: 253, + 60: 253, + 61: 253, + 62: 253, + 63: 253, + 64: 253, + 65: 28, + 66: 40, + 67: 54, + 68: 45, + 69: 32, + 70: 50, + 71: 49, + 72: 38, + 73: 39, + 74: 53, + 75: 36, + 76: 41, + 77: 34, + 78: 35, + 79: 47, + 80: 46, + 81: 72, + 82: 43, + 83: 33, + 84: 37, + 85: 57, + 86: 48, + 87: 64, + 88: 68, + 89: 55, + 90: 52, + 91: 253, + 92: 253, + 93: 253, + 94: 253, + 95: 253, + 96: 253, + 97: 2, + 98: 18, + 99: 26, + 100: 17, + 101: 1, + 102: 27, + 103: 12, + 104: 20, + 105: 9, + 106: 22, + 107: 7, + 108: 6, + 109: 13, + 110: 4, + 111: 8, + 112: 23, + 113: 67, + 114: 10, + 115: 5, + 116: 3, + 117: 21, + 118: 19, + 119: 65, + 120: 62, + 121: 16, + 122: 11, + 123: 253, + 124: 253, + 125: 253, + 126: 253, + 127: 253, + 128: 161, + 129: 162, + 130: 163, + 131: 164, + 132: 165, + 133: 166, + 134: 167, + 135: 168, + 136: 169, + 137: 170, + 138: 171, + 139: 172, + 140: 173, + 141: 174, + 142: 
175, + 143: 176, + 144: 177, + 145: 178, + 146: 179, + 147: 180, + 148: 78, + 149: 181, + 150: 69, + 151: 182, + 152: 183, + 153: 184, + 154: 185, + 155: 186, + 156: 187, + 157: 188, + 158: 189, + 159: 190, + 160: 191, + 161: 192, + 162: 193, + 163: 194, + 164: 195, + 165: 196, + 166: 197, + 167: 76, + 168: 198, + 169: 199, + 170: 200, + 171: 201, + 172: 202, + 173: 203, + 174: 204, + 175: 205, + 176: 81, + 177: 206, + 178: 207, + 179: 208, + 180: 209, + 181: 210, + 182: 211, + 183: 212, + 184: 213, + 185: 214, + 186: 215, + 187: 216, + 188: 217, + 189: 218, + 190: 219, + 191: 220, + 192: 221, + 193: 51, + 194: 83, + 195: 222, + 196: 80, + 197: 223, + 198: 224, + 199: 225, + 200: 226, + 201: 44, + 202: 227, + 203: 228, + 204: 229, + 205: 61, + 206: 230, + 207: 231, + 208: 232, + 209: 233, + 210: 234, + 211: 58, + 212: 235, + 213: 66, + 214: 59, + 215: 236, + 216: 237, + 217: 238, + 218: 60, + 219: 70, + 220: 63, + 221: 239, + 222: 240, + 223: 241, + 224: 84, + 225: 14, + 226: 75, + 227: 242, + 228: 71, + 229: 82, + 230: 243, + 231: 73, + 232: 244, + 233: 15, + 234: 85, + 235: 79, + 236: 86, + 237: 30, + 238: 77, + 239: 87, + 240: 245, + 241: 246, + 242: 247, + 243: 25, + 244: 74, + 245: 42, + 246: 24, + 247: 248, + 248: 249, + 249: 250, + 250: 31, + 251: 56, + 252: 29, + 253: 251, + 254: 252, + 255: 253} +WINDOWS_1250_HUNGARIAN_MODEL = SingleByteCharSetModel(charset_name="windows-1250", language="Hungarian", + char_to_order_map=WINDOWS_1250_HUNGARIAN_CHAR_TO_ORDER, + language_model=HUNGARIAN_LANG_MODEL, + typical_positive_ratio=0.947368, + keep_ascii_letters=True, + alphabet="ABCDEFGHIJKLMNOPRSTUVZabcdefghijklmnoprstuvzÁÉÍÓÖÚÜáéíóöúüŐőŰű") +ISO_8859_2_HUNGARIAN_CHAR_TO_ORDER = { + 0: 255, + 1: 255, + 2: 255, + 3: 255, + 4: 255, + 5: 255, + 6: 255, + 7: 255, + 8: 255, + 9: 255, + 10: 254, + 11: 255, + 12: 255, + 13: 254, + 14: 255, + 15: 255, + 16: 255, + 17: 255, + 18: 255, + 19: 255, + 20: 255, + 21: 255, + 22: 255, + 23: 255, + 24: 255, + 25: 255, + 26: 255, + 
27: 255, + 28: 255, + 29: 255, + 30: 255, + 31: 255, + 32: 253, + 33: 253, + 34: 253, + 35: 253, + 36: 253, + 37: 253, + 38: 253, + 39: 253, + 40: 253, + 41: 253, + 42: 253, + 43: 253, + 44: 253, + 45: 253, + 46: 253, + 47: 253, + 48: 252, + 49: 252, + 50: 252, + 51: 252, + 52: 252, + 53: 252, + 54: 252, + 55: 252, + 56: 252, + 57: 252, + 58: 253, + 59: 253, + 60: 253, + 61: 253, + 62: 253, + 63: 253, + 64: 253, + 65: 28, + 66: 40, + 67: 54, + 68: 45, + 69: 32, + 70: 50, + 71: 49, + 72: 38, + 73: 39, + 74: 53, + 75: 36, + 76: 41, + 77: 34, + 78: 35, + 79: 47, + 80: 46, + 81: 71, + 82: 43, + 83: 33, + 84: 37, + 85: 57, + 86: 48, + 87: 64, + 88: 68, + 89: 55, + 90: 52, + 91: 253, + 92: 253, + 93: 253, + 94: 253, + 95: 253, + 96: 253, + 97: 2, + 98: 18, + 99: 26, + 100: 17, + 101: 1, + 102: 27, + 103: 12, + 104: 20, + 105: 9, + 106: 22, + 107: 7, + 108: 6, + 109: 13, + 110: 4, + 111: 8, + 112: 23, + 113: 67, + 114: 10, + 115: 5, + 116: 3, + 117: 21, + 118: 19, + 119: 65, + 120: 62, + 121: 16, + 122: 11, + 123: 253, + 124: 253, + 125: 253, + 126: 253, + 127: 253, + 128: 159, + 129: 160, + 130: 161, + 131: 162, + 132: 163, + 133: 164, + 134: 165, + 135: 166, + 136: 167, + 137: 168, + 138: 169, + 139: 170, + 140: 171, + 141: 172, + 142: 173, + 143: 174, + 144: 175, + 145: 176, + 146: 177, + 147: 178, + 148: 179, + 149: 180, + 150: 181, + 151: 182, + 152: 183, + 153: 184, + 154: 185, + 155: 186, + 156: 187, + 157: 188, + 158: 189, + 159: 190, + 160: 191, + 161: 192, + 162: 193, + 163: 194, + 164: 195, + 165: 196, + 166: 197, + 167: 75, + 168: 198, + 169: 199, + 170: 200, + 171: 201, + 172: 202, + 173: 203, + 174: 204, + 175: 205, + 176: 79, + 177: 206, + 178: 207, + 179: 208, + 180: 209, + 181: 210, + 182: 211, + 183: 212, + 184: 213, + 185: 214, + 186: 215, + 187: 216, + 188: 217, + 189: 218, + 190: 219, + 191: 220, + 192: 221, + 193: 51, + 194: 81, + 195: 222, + 196: 78, + 197: 223, + 198: 224, + 199: 225, + 200: 226, + 201: 44, + 202: 227, + 203: 228, + 204: 229, + 
205: 61, + 206: 230, + 207: 231, + 208: 232, + 209: 233, + 210: 234, + 211: 58, + 212: 235, + 213: 66, + 214: 59, + 215: 236, + 216: 237, + 217: 238, + 218: 60, + 219: 69, + 220: 63, + 221: 239, + 222: 240, + 223: 241, + 224: 82, + 225: 14, + 226: 74, + 227: 242, + 228: 70, + 229: 80, + 230: 243, + 231: 72, + 232: 244, + 233: 15, + 234: 83, + 235: 77, + 236: 84, + 237: 30, + 238: 76, + 239: 85, + 240: 245, + 241: 246, + 242: 247, + 243: 25, + 244: 73, + 245: 42, + 246: 24, + 247: 248, + 248: 249, + 249: 250, + 250: 31, + 251: 56, + 252: 29, + 253: 251, + 254: 252, + 255: 253} +ISO_8859_2_HUNGARIAN_MODEL = SingleByteCharSetModel(charset_name="ISO-8859-2", language="Hungarian", + char_to_order_map=ISO_8859_2_HUNGARIAN_CHAR_TO_ORDER, + language_model=HUNGARIAN_LANG_MODEL, + typical_positive_ratio=0.947368, + keep_ascii_letters=True, + alphabet="ABCDEFGHIJKLMNOPRSTUVZabcdefghijklmnoprstuvzÁÉÍÓÖÚÜáéíóöúüŐőŰű") diff --git a/APPS_UNCOMPILED/lib/chardet/langrussianmodel.py b/APPS_UNCOMPILED/lib/chardet/langrussianmodel.py new file mode 100644 index 0000000..466b273 --- /dev/null +++ b/APPS_UNCOMPILED/lib/chardet/langrussianmodel.py @@ -0,0 +1,5618 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/langrussianmodel.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 131168 bytes +from chardet.sbcharsetprober import SingleByteCharSetModel +RUSSIAN_LANG_MODEL = {37:{ + 37: 0, + 44: 1, + 33: 1, + 46: 1, + 41: 1, + 48: 1, + 56: 1, + 51: 1, + 42: 1, + 60: 1, + 36: 1, + 49: 1, + 38: 1, + 31: 2, + 34: 1, + 35: 1, + 45: 1, + 32: 1, + 40: 1, + 52: 1, + 53: 1, + 55: 1, + 58: 1, + 50: 1, + 57: 1, + 63: 1, + 62: 0, + 61: 0, + 47: 0, + 59: 1, + 43: 1, + 3: 1, + 21: 2, + 10: 2, + 19: 2, + 13: 2, + 2: 0, + 24: 1, + 20: 1, + 4: 0, + 23: 1, + 11: 2, + 8: 3, + 12: 2, + 5: 2, + 1: 
0, + 15: 2, + 9: 2, + 7: 2, + 6: 2, + 14: 2, + 39: 2, + 26: 2, + 28: 0, + 22: 1, + 25: 2, + 29: 0, + 54: 0, + 18: 0, + 17: 0, + 30: 1, + 27: 0, + 16: 0}, + 44:{ + 37: 1, + 44: 0, + 33: 1, + 46: 1, + 41: 0, + 48: 1, + 56: 0, + 51: 0, + 42: 1, + 60: 0, + 36: 0, + 49: 1, + 38: 1, + 31: 1, + 34: 1, + 35: 0, + 45: 1, + 32: 0, + 40: 0, + 52: 1, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 1, + 61: 0, + 47: 0, + 59: 0, + 43: 1, + 3: 2, + 21: 0, + 10: 0, + 19: 0, + 13: 1, + 2: 3, + 24: 0, + 20: 0, + 4: 2, + 23: 0, + 11: 0, + 8: 2, + 12: 0, + 5: 0, + 1: 3, + 15: 0, + 9: 2, + 7: 0, + 6: 0, + 14: 2, + 39: 0, + 26: 0, + 28: 0, + 22: 0, + 25: 0, + 29: 0, + 54: 0, + 18: 2, + 17: 1, + 30: 2, + 27: 1, + 16: 1}, + 33:{ + 37: 2, + 44: 0, + 33: 1, + 46: 0, + 41: 1, + 48: 1, + 56: 0, + 51: 0, + 42: 1, + 60: 0, + 36: 1, + 49: 1, + 38: 1, + 31: 1, + 34: 1, + 35: 1, + 45: 1, + 32: 1, + 40: 1, + 52: 1, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 1, + 63: 0, + 62: 1, + 61: 1, + 47: 0, + 59: 0, + 43: 1, + 3: 2, + 21: 1, + 10: 1, + 19: 1, + 13: 2, + 2: 3, + 24: 0, + 20: 2, + 4: 2, + 23: 0, + 11: 1, + 8: 2, + 12: 2, + 5: 2, + 1: 3, + 15: 2, + 9: 2, + 7: 3, + 6: 2, + 14: 2, + 39: 0, + 26: 1, + 28: 1, + 22: 2, + 25: 1, + 29: 0, + 54: 1, + 18: 3, + 17: 1, + 30: 2, + 27: 0, + 16: 1}, + 46:{ + 37: 1, + 44: 1, + 33: 0, + 46: 0, + 41: 1, + 48: 1, + 56: 0, + 51: 0, + 42: 1, + 60: 0, + 36: 0, + 49: 1, + 38: 1, + 31: 1, + 34: 1, + 35: 1, + 45: 1, + 32: 0, + 40: 0, + 52: 1, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 2, + 21: 0, + 10: 1, + 19: 0, + 13: 2, + 2: 2, + 24: 0, + 20: 0, + 4: 2, + 23: 0, + 11: 0, + 8: 2, + 12: 1, + 5: 1, + 1: 3, + 15: 0, + 9: 2, + 7: 0, + 6: 0, + 14: 2, + 39: 0, + 26: 0, + 28: 0, + 22: 0, + 25: 0, + 29: 0, + 54: 0, + 18: 0, + 17: 1, + 30: 1, + 27: 1, + 16: 0}, + 41:{ + 37: 1, + 44: 0, + 33: 1, + 46: 0, + 41: 0, + 48: 2, + 56: 1, + 51: 0, + 42: 1, + 60: 0, + 36: 1, + 49: 1, + 38: 0, + 31: 1, + 34: 1, + 35: 
0, + 45: 1, + 32: 1, + 40: 0, + 52: 1, + 53: 0, + 55: 0, + 58: 1, + 50: 1, + 57: 0, + 63: 0, + 62: 1, + 61: 1, + 47: 0, + 59: 0, + 43: 1, + 3: 3, + 21: 0, + 10: 2, + 19: 0, + 13: 0, + 2: 2, + 24: 3, + 20: 1, + 4: 2, + 23: 0, + 11: 0, + 8: 2, + 12: 1, + 5: 1, + 1: 3, + 15: 0, + 9: 2, + 7: 0, + 6: 0, + 14: 2, + 39: 0, + 26: 1, + 28: 0, + 22: 0, + 25: 0, + 29: 0, + 54: 0, + 18: 1, + 17: 1, + 30: 2, + 27: 1, + 16: 1}, + 48:{ + 37: 1, + 44: 1, + 33: 1, + 46: 1, + 41: 1, + 48: 1, + 56: 1, + 51: 1, + 42: 1, + 60: 1, + 36: 1, + 49: 1, + 38: 1, + 31: 2, + 34: 1, + 35: 1, + 45: 2, + 32: 2, + 40: 1, + 52: 0, + 53: 0, + 55: 1, + 58: 1, + 50: 1, + 57: 1, + 63: 1, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 1, + 3: 0, + 21: 0, + 10: 2, + 19: 2, + 13: 2, + 2: 2, + 24: 1, + 20: 1, + 4: 0, + 23: 2, + 11: 1, + 8: 2, + 12: 2, + 5: 1, + 1: 0, + 15: 1, + 9: 1, + 7: 3, + 6: 0, + 14: 0, + 39: 1, + 26: 1, + 28: 0, + 22: 0, + 25: 1, + 29: 2, + 54: 0, + 18: 0, + 17: 0, + 30: 0, + 27: 1, + 16: 0}, + 56:{ + 37: 1, + 44: 0, + 33: 0, + 46: 0, + 41: 1, + 48: 1, + 56: 0, + 51: 1, + 42: 1, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 1, + 34: 1, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 1, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 2, + 21: 1, + 10: 0, + 19: 1, + 13: 1, + 2: 2, + 24: 1, + 20: 0, + 4: 2, + 23: 0, + 11: 0, + 8: 0, + 12: 1, + 5: 0, + 1: 2, + 15: 0, + 9: 1, + 7: 0, + 6: 0, + 14: 2, + 39: 0, + 26: 0, + 28: 0, + 22: 0, + 25: 0, + 29: 0, + 54: 0, + 18: 0, + 17: 0, + 30: 0, + 27: 2, + 16: 0}, + 51:{ + 37: 1, + 44: 0, + 33: 1, + 46: 1, + 41: 1, + 48: 1, + 56: 0, + 51: 0, + 42: 1, + 60: 0, + 36: 0, + 49: 1, + 38: 1, + 31: 1, + 34: 1, + 35: 0, + 45: 1, + 32: 0, + 40: 0, + 52: 1, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 1, + 61: 1, + 47: 0, + 59: 0, + 43: 0, + 3: 3, + 21: 1, + 10: 2, + 19: 0, + 13: 2, + 2: 2, + 24: 0, + 20: 0, + 4: 2, + 23: 0, + 11: 0, + 8: 1, + 12: 1, + 5: 2, + 1: 2, + 15: 0, + 9: 1, + 7: 0, + 6: 0, + 
14: 1, + 39: 0, + 26: 0, + 28: 0, + 22: 0, + 25: 0, + 29: 0, + 54: 0, + 18: 1, + 17: 0, + 30: 0, + 27: 0, + 16: 1}, + 42:{ + 37: 1, + 44: 1, + 33: 1, + 46: 1, + 41: 1, + 48: 2, + 56: 1, + 51: 1, + 42: 1, + 60: 1, + 36: 1, + 49: 1, + 38: 1, + 31: 1, + 34: 1, + 35: 1, + 45: 1, + 32: 2, + 40: 1, + 52: 0, + 53: 1, + 55: 1, + 58: 1, + 50: 1, + 57: 0, + 63: 1, + 62: 0, + 61: 0, + 47: 0, + 59: 1, + 43: 1, + 3: 1, + 21: 2, + 10: 2, + 19: 2, + 13: 2, + 2: 2, + 24: 0, + 20: 2, + 4: 1, + 23: 0, + 11: 1, + 8: 2, + 12: 2, + 5: 2, + 1: 1, + 15: 1, + 9: 2, + 7: 2, + 6: 2, + 14: 1, + 39: 1, + 26: 2, + 28: 0, + 22: 0, + 25: 1, + 29: 1, + 54: 0, + 18: 0, + 17: 0, + 30: 0, + 27: 1, + 16: 0}, + 60:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 1, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 1, + 49: 1, + 38: 0, + 31: 1, + 34: 0, + 35: 0, + 45: 0, + 32: 1, + 40: 1, + 52: 0, + 53: 0, + 55: 1, + 58: 1, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 0, + 21: 0, + 10: 0, + 19: 0, + 13: 0, + 2: 1, + 24: 0, + 20: 0, + 4: 0, + 23: 0, + 11: 0, + 8: 0, + 12: 0, + 5: 0, + 1: 2, + 15: 0, + 9: 0, + 7: 0, + 6: 0, + 14: 0, + 39: 0, + 26: 0, + 28: 0, + 22: 0, + 25: 0, + 29: 0, + 54: 0, + 18: 0, + 17: 0, + 30: 0, + 27: 0, + 16: 0}, + 36:{ + 37: 2, + 44: 0, + 33: 1, + 46: 0, + 41: 0, + 48: 1, + 56: 0, + 51: 1, + 42: 1, + 60: 0, + 36: 0, + 49: 1, + 38: 0, + 31: 1, + 34: 2, + 35: 1, + 45: 1, + 32: 1, + 40: 1, + 52: 1, + 53: 0, + 55: 0, + 58: 1, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 3, + 21: 0, + 10: 1, + 19: 0, + 13: 0, + 2: 2, + 24: 0, + 20: 0, + 4: 2, + 23: 0, + 11: 0, + 8: 2, + 12: 0, + 5: 1, + 1: 3, + 15: 0, + 9: 2, + 7: 2, + 6: 2, + 14: 2, + 39: 0, + 26: 1, + 28: 0, + 22: 0, + 25: 0, + 29: 0, + 54: 0, + 18: 1, + 17: 1, + 30: 2, + 27: 1, + 16: 0}, + 49:{ + 37: 2, + 44: 0, + 33: 0, + 46: 1, + 41: 0, + 48: 1, + 56: 1, + 51: 0, + 42: 1, + 60: 0, + 36: 1, + 49: 1, + 38: 1, + 31: 0, + 34: 1, + 35: 1, + 45: 0, + 32: 1, + 40: 1, + 52: 
1, + 53: 0, + 55: 0, + 58: 0, + 50: 1, + 57: 0, + 63: 0, + 62: 1, + 61: 1, + 47: 0, + 59: 1, + 43: 1, + 3: 2, + 21: 0, + 10: 0, + 19: 1, + 13: 0, + 2: 2, + 24: 1, + 20: 0, + 4: 2, + 23: 0, + 11: 0, + 8: 1, + 12: 0, + 5: 1, + 1: 2, + 15: 0, + 9: 0, + 7: 0, + 6: 0, + 14: 2, + 39: 0, + 26: 1, + 28: 0, + 22: 0, + 25: 0, + 29: 0, + 54: 0, + 18: 1, + 17: 1, + 30: 2, + 27: 2, + 16: 1}, + 38:{ + 37: 1, + 44: 1, + 33: 1, + 46: 0, + 41: 0, + 48: 1, + 56: 0, + 51: 0, + 42: 1, + 60: 0, + 36: 1, + 49: 1, + 38: 1, + 31: 1, + 34: 1, + 35: 1, + 45: 1, + 32: 1, + 40: 1, + 52: 1, + 53: 1, + 55: 1, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 1, + 61: 0, + 47: 1, + 59: 0, + 43: 1, + 3: 3, + 21: 0, + 10: 0, + 19: 1, + 13: 0, + 2: 2, + 24: 0, + 20: 0, + 4: 3, + 23: 0, + 11: 0, + 8: 1, + 12: 1, + 5: 2, + 1: 3, + 15: 0, + 9: 1, + 7: 1, + 6: 0, + 14: 2, + 39: 0, + 26: 0, + 28: 0, + 22: 0, + 25: 0, + 29: 0, + 54: 0, + 18: 3, + 17: 1, + 30: 2, + 27: 1, + 16: 1}, + 31:{ + 37: 2, + 44: 0, + 33: 0, + 46: 1, + 41: 1, + 48: 1, + 56: 0, + 51: 1, + 42: 2, + 60: 0, + 36: 1, + 49: 0, + 38: 0, + 31: 1, + 34: 1, + 35: 0, + 45: 1, + 32: 1, + 40: 1, + 52: 1, + 53: 1, + 55: 1, + 58: 1, + 50: 1, + 57: 0, + 63: 0, + 62: 1, + 61: 1, + 47: 1, + 59: 0, + 43: 1, + 3: 3, + 21: 0, + 10: 0, + 19: 0, + 13: 0, + 2: 3, + 24: 0, + 20: 0, + 4: 3, + 23: 0, + 11: 0, + 8: 0, + 12: 0, + 5: 0, + 1: 3, + 15: 0, + 9: 1, + 7: 0, + 6: 0, + 14: 3, + 39: 0, + 26: 1, + 28: 0, + 22: 0, + 25: 0, + 29: 0, + 54: 0, + 18: 1, + 17: 2, + 30: 1, + 27: 1, + 16: 1}, + 34:{ + 37: 0, + 44: 1, + 33: 1, + 46: 1, + 41: 2, + 48: 1, + 56: 1, + 51: 1, + 42: 1, + 60: 1, + 36: 1, + 49: 2, + 38: 1, + 31: 2, + 34: 1, + 35: 1, + 45: 2, + 32: 1, + 40: 1, + 52: 1, + 53: 1, + 55: 1, + 58: 0, + 50: 1, + 57: 1, + 63: 1, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 1, + 3: 1, + 21: 2, + 10: 1, + 19: 2, + 13: 2, + 2: 0, + 24: 1, + 20: 1, + 4: 0, + 23: 1, + 11: 2, + 8: 2, + 12: 1, + 5: 3, + 1: 0, + 15: 2, + 9: 2, + 7: 2, + 6: 2, + 14: 1, + 39: 1, + 26: 2, + 28: 1, + 
22: 2, + 25: 2, + 29: 1, + 54: 0, + 18: 0, + 17: 0, + 30: 0, + 27: 0, + 16: 0}, + 35:{ + 37: 1, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 1, + 56: 0, + 51: 0, + 42: 1, + 60: 0, + 36: 0, + 49: 1, + 38: 0, + 31: 1, + 34: 1, + 35: 1, + 45: 2, + 32: 1, + 40: 1, + 52: 1, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 1, + 61: 1, + 47: 0, + 59: 0, + 43: 1, + 3: 2, + 21: 0, + 10: 0, + 19: 0, + 13: 0, + 2: 2, + 24: 0, + 20: 0, + 4: 2, + 23: 0, + 11: 0, + 8: 2, + 12: 0, + 5: 1, + 1: 3, + 15: 0, + 9: 3, + 7: 1, + 6: 1, + 14: 2, + 39: 1, + 26: 0, + 28: 0, + 22: 0, + 25: 1, + 29: 0, + 54: 0, + 18: 1, + 17: 2, + 30: 1, + 27: 0, + 16: 2}, + 45:{ + 37: 2, + 44: 1, + 33: 1, + 46: 1, + 41: 1, + 48: 2, + 56: 1, + 51: 0, + 42: 2, + 60: 0, + 36: 1, + 49: 1, + 38: 1, + 31: 1, + 34: 2, + 35: 0, + 45: 1, + 32: 1, + 40: 1, + 52: 1, + 53: 0, + 55: 1, + 58: 1, + 50: 1, + 57: 1, + 63: 0, + 62: 1, + 61: 1, + 47: 1, + 59: 1, + 43: 1, + 3: 3, + 21: 0, + 10: 1, + 19: 0, + 13: 0, + 2: 2, + 24: 1, + 20: 0, + 4: 2, + 23: 0, + 11: 0, + 8: 0, + 12: 0, + 5: 0, + 1: 3, + 15: 0, + 9: 1, + 7: 0, + 6: 0, + 14: 2, + 39: 0, + 26: 0, + 28: 0, + 22: 0, + 25: 0, + 29: 0, + 54: 0, + 18: 2, + 17: 0, + 30: 1, + 27: 1, + 16: 2}, + 32:{ + 37: 1, + 44: 1, + 33: 1, + 46: 1, + 41: 1, + 48: 1, + 56: 0, + 51: 0, + 42: 1, + 60: 0, + 36: 1, + 49: 1, + 38: 1, + 31: 1, + 34: 1, + 35: 1, + 45: 1, + 32: 1, + 40: 2, + 52: 1, + 53: 0, + 55: 1, + 58: 1, + 50: 1, + 57: 1, + 63: 0, + 62: 1, + 61: 1, + 47: 1, + 59: 1, + 43: 1, + 3: 2, + 21: 1, + 10: 2, + 19: 1, + 13: 2, + 2: 3, + 24: 1, + 20: 1, + 4: 2, + 23: 0, + 11: 2, + 8: 2, + 12: 2, + 5: 2, + 1: 2, + 15: 2, + 9: 2, + 7: 1, + 6: 3, + 14: 2, + 39: 1, + 26: 1, + 28: 1, + 22: 1, + 25: 0, + 29: 0, + 54: 1, + 18: 1, + 17: 1, + 30: 2, + 27: 1, + 16: 1}, + 40:{ + 37: 1, + 44: 0, + 33: 1, + 46: 0, + 41: 0, + 48: 1, + 56: 0, + 51: 0, + 42: 1, + 60: 0, + 36: 1, + 49: 1, + 38: 1, + 31: 1, + 34: 2, + 35: 0, + 45: 1, + 32: 1, + 40: 1, + 52: 1, + 53: 0, + 55: 0, + 58: 0, + 50: 
1, + 57: 0, + 63: 0, + 62: 1, + 61: 1, + 47: 1, + 59: 1, + 43: 1, + 3: 3, + 21: 1, + 10: 2, + 19: 0, + 13: 0, + 2: 3, + 24: 0, + 20: 0, + 4: 2, + 23: 0, + 11: 1, + 8: 1, + 12: 0, + 5: 0, + 1: 3, + 15: 0, + 9: 2, + 7: 1, + 6: 0, + 14: 2, + 39: 0, + 26: 0, + 28: 0, + 22: 0, + 25: 0, + 29: 1, + 54: 0, + 18: 3, + 17: 1, + 30: 2, + 27: 1, + 16: 1}, + 52:{ + 37: 1, + 44: 1, + 33: 1, + 46: 1, + 41: 1, + 48: 1, + 56: 1, + 51: 0, + 42: 0, + 60: 1, + 36: 1, + 49: 1, + 38: 1, + 31: 1, + 34: 1, + 35: 1, + 45: 1, + 32: 1, + 40: 1, + 52: 0, + 53: 0, + 55: 1, + 58: 0, + 50: 1, + 57: 1, + 63: 1, + 62: 0, + 61: 0, + 47: 0, + 59: 1, + 43: 0, + 3: 1, + 21: 2, + 10: 2, + 19: 1, + 13: 2, + 2: 1, + 24: 2, + 20: 2, + 4: 2, + 23: 1, + 11: 1, + 8: 2, + 12: 2, + 5: 1, + 1: 2, + 15: 1, + 9: 2, + 7: 2, + 6: 2, + 14: 0, + 39: 1, + 26: 1, + 28: 1, + 22: 2, + 25: 1, + 29: 1, + 54: 0, + 18: 0, + 17: 0, + 30: 2, + 27: 1, + 16: 0}, + 53:{ + 37: 1, + 44: 1, + 33: 0, + 46: 0, + 41: 0, + 48: 1, + 56: 0, + 51: 0, + 42: 1, + 60: 0, + 36: 0, + 49: 1, + 38: 0, + 31: 0, + 34: 1, + 35: 0, + 45: 1, + 32: 0, + 40: 0, + 52: 1, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 2, + 21: 0, + 10: 0, + 19: 0, + 13: 0, + 2: 2, + 24: 0, + 20: 0, + 4: 2, + 23: 0, + 11: 0, + 8: 2, + 12: 0, + 5: 0, + 1: 2, + 15: 0, + 9: 2, + 7: 0, + 6: 1, + 14: 2, + 39: 0, + 26: 0, + 28: 0, + 22: 0, + 25: 0, + 29: 0, + 54: 0, + 18: 0, + 17: 1, + 30: 2, + 27: 0, + 16: 0}, + 55:{ + 37: 1, + 44: 0, + 33: 1, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 1, + 60: 0, + 36: 0, + 49: 1, + 38: 1, + 31: 1, + 34: 1, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 2, + 21: 0, + 10: 2, + 19: 0, + 13: 0, + 2: 2, + 24: 0, + 20: 0, + 4: 2, + 23: 0, + 11: 0, + 8: 2, + 12: 1, + 5: 0, + 1: 2, + 15: 0, + 9: 2, + 7: 0, + 6: 0, + 14: 1, + 39: 0, + 26: 0, + 28: 0, + 22: 0, + 25: 0, + 29: 0, + 54: 0, + 
18: 0, + 17: 1, + 30: 1, + 27: 0, + 16: 0}, + 58:{ + 37: 1, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 1, + 56: 0, + 51: 0, + 42: 1, + 60: 0, + 36: 1, + 49: 0, + 38: 0, + 31: 0, + 34: 1, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 1, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 1, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 1, + 21: 0, + 10: 1, + 19: 0, + 13: 0, + 2: 2, + 24: 0, + 20: 0, + 4: 2, + 23: 0, + 11: 0, + 8: 0, + 12: 0, + 5: 0, + 1: 0, + 15: 0, + 9: 0, + 7: 0, + 6: 0, + 14: 1, + 39: 0, + 26: 0, + 28: 0, + 22: 0, + 25: 0, + 29: 0, + 54: 0, + 18: 1, + 17: 0, + 30: 0, + 27: 1, + 16: 0}, + 50:{ + 37: 1, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 1, + 56: 0, + 51: 0, + 42: 1, + 60: 0, + 36: 1, + 49: 0, + 38: 0, + 31: 1, + 34: 0, + 35: 1, + 45: 0, + 32: 0, + 40: 1, + 52: 1, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 1, + 47: 0, + 59: 0, + 43: 0, + 3: 2, + 21: 0, + 10: 0, + 19: 0, + 13: 0, + 2: 2, + 24: 0, + 20: 0, + 4: 2, + 23: 0, + 11: 0, + 8: 1, + 12: 0, + 5: 0, + 1: 1, + 15: 0, + 9: 1, + 7: 0, + 6: 3, + 14: 2, + 39: 0, + 26: 0, + 28: 0, + 22: 0, + 25: 0, + 29: 0, + 54: 0, + 18: 0, + 17: 1, + 30: 0, + 27: 0, + 16: 0}, + 57:{ + 37: 1, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 1, + 56: 0, + 51: 0, + 42: 1, + 60: 0, + 36: 1, + 49: 1, + 38: 0, + 31: 1, + 34: 1, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 1, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 2, + 21: 0, + 10: 1, + 19: 0, + 13: 0, + 2: 2, + 24: 0, + 20: 0, + 4: 1, + 23: 0, + 11: 1, + 8: 2, + 12: 1, + 5: 1, + 1: 2, + 15: 2, + 9: 1, + 7: 0, + 6: 2, + 14: 2, + 39: 0, + 26: 1, + 28: 0, + 22: 0, + 25: 1, + 29: 0, + 54: 0, + 18: 0, + 17: 0, + 30: 1, + 27: 0, + 16: 0}, + 63:{ + 37: 1, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 1, + 56: 0, + 51: 0, + 42: 1, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 
1, + 47: 0, + 59: 0, + 43: 0, + 3: 1, + 21: 0, + 10: 0, + 19: 0, + 13: 0, + 2: 1, + 24: 0, + 20: 0, + 4: 1, + 23: 0, + 11: 0, + 8: 0, + 12: 0, + 5: 0, + 1: 1, + 15: 0, + 9: 0, + 7: 0, + 6: 0, + 14: 1, + 39: 0, + 26: 0, + 28: 0, + 22: 0, + 25: 0, + 29: 0, + 54: 0, + 18: 0, + 17: 0, + 30: 0, + 27: 0, + 16: 0}, + 62:{ + 37: 0, + 44: 0, + 33: 1, + 46: 1, + 41: 0, + 48: 1, + 56: 0, + 51: 0, + 42: 0, + 60: 1, + 36: 1, + 49: 1, + 38: 1, + 31: 1, + 34: 0, + 35: 1, + 45: 1, + 32: 1, + 40: 1, + 52: 0, + 53: 0, + 55: 1, + 58: 1, + 50: 0, + 57: 1, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 0, + 21: 0, + 10: 0, + 19: 0, + 13: 0, + 2: 0, + 24: 0, + 20: 0, + 4: 0, + 23: 0, + 11: 0, + 8: 0, + 12: 0, + 5: 0, + 1: 0, + 15: 0, + 9: 0, + 7: 0, + 6: 0, + 14: 0, + 39: 0, + 26: 0, + 28: 0, + 22: 0, + 25: 0, + 29: 0, + 54: 0, + 18: 0, + 17: 0, + 30: 0, + 27: 0, + 16: 0}, + 61:{ + 37: 0, + 44: 1, + 33: 1, + 46: 0, + 41: 1, + 48: 1, + 56: 0, + 51: 0, + 42: 1, + 60: 0, + 36: 1, + 49: 0, + 38: 1, + 31: 1, + 34: 1, + 35: 0, + 45: 0, + 32: 1, + 40: 0, + 52: 0, + 53: 1, + 55: 0, + 58: 0, + 50: 0, + 57: 1, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 1, + 43: 1, + 3: 0, + 21: 0, + 10: 0, + 19: 0, + 13: 0, + 2: 0, + 24: 0, + 20: 0, + 4: 0, + 23: 0, + 11: 0, + 8: 0, + 12: 0, + 5: 0, + 1: 0, + 15: 0, + 9: 0, + 7: 0, + 6: 0, + 14: 0, + 39: 0, + 26: 0, + 28: 0, + 22: 0, + 25: 0, + 29: 0, + 54: 0, + 18: 0, + 17: 0, + 30: 0, + 27: 0, + 16: 0}, + 47:{ + 37: 0, + 44: 0, + 33: 1, + 46: 0, + 41: 1, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 1, + 36: 1, + 49: 1, + 38: 1, + 31: 1, + 34: 0, + 35: 1, + 45: 1, + 32: 1, + 40: 1, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 1, + 21: 1, + 10: 2, + 19: 1, + 13: 2, + 2: 0, + 24: 1, + 20: 0, + 4: 0, + 23: 2, + 11: 2, + 8: 2, + 12: 2, + 5: 2, + 1: 0, + 15: 1, + 9: 2, + 7: 1, + 6: 3, + 14: 1, + 39: 1, + 26: 1, + 28: 0, + 22: 0, + 25: 1, + 29: 0, + 54: 0, + 18: 0, + 17: 0, + 30: 0, + 27: 0, + 
16: 0}, + 59:{ + 37: 1, + 44: 1, + 33: 0, + 46: 0, + 41: 1, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 1, + 34: 0, + 35: 0, + 45: 1, + 32: 0, + 40: 1, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 1, + 57: 0, + 63: 1, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 0, + 21: 1, + 10: 0, + 19: 1, + 13: 1, + 2: 0, + 24: 1, + 20: 0, + 4: 0, + 23: 0, + 11: 1, + 8: 2, + 12: 1, + 5: 2, + 1: 0, + 15: 1, + 9: 1, + 7: 1, + 6: 0, + 14: 0, + 39: 0, + 26: 1, + 28: 0, + 22: 0, + 25: 0, + 29: 0, + 54: 0, + 18: 0, + 17: 0, + 30: 0, + 27: 0, + 16: 0}, + 43:{ + 37: 0, + 44: 0, + 33: 1, + 46: 1, + 41: 0, + 48: 1, + 56: 0, + 51: 0, + 42: 1, + 60: 0, + 36: 1, + 49: 0, + 38: 0, + 31: 1, + 34: 0, + 35: 0, + 45: 0, + 32: 1, + 40: 1, + 52: 0, + 53: 0, + 55: 1, + 58: 0, + 50: 1, + 57: 0, + 63: 1, + 62: 0, + 61: 0, + 47: 0, + 59: 1, + 43: 1, + 3: 0, + 21: 1, + 10: 1, + 19: 1, + 13: 1, + 2: 0, + 24: 0, + 20: 1, + 4: 0, + 23: 1, + 11: 1, + 8: 1, + 12: 1, + 5: 2, + 1: 0, + 15: 1, + 9: 1, + 7: 1, + 6: 0, + 14: 0, + 39: 0, + 26: 1, + 28: 0, + 22: 0, + 25: 1, + 29: 1, + 54: 0, + 18: 0, + 17: 0, + 30: 0, + 27: 0, + 16: 0}, + 3:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 1, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 1, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 2, + 21: 3, + 10: 3, + 19: 3, + 13: 3, + 2: 3, + 24: 3, + 20: 3, + 4: 3, + 23: 3, + 11: 3, + 8: 3, + 12: 3, + 5: 3, + 1: 2, + 15: 3, + 9: 3, + 7: 3, + 6: 3, + 14: 3, + 39: 2, + 26: 3, + 28: 3, + 22: 3, + 25: 3, + 29: 3, + 54: 0, + 18: 0, + 17: 0, + 30: 2, + 27: 3, + 16: 3}, + 21:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 1, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 3, 
+ 21: 2, + 10: 2, + 19: 1, + 13: 2, + 2: 3, + 24: 2, + 20: 1, + 4: 3, + 23: 0, + 11: 2, + 8: 3, + 12: 2, + 5: 3, + 1: 3, + 15: 1, + 9: 3, + 7: 3, + 6: 2, + 14: 3, + 39: 0, + 26: 2, + 28: 1, + 22: 1, + 25: 2, + 29: 3, + 54: 2, + 18: 3, + 17: 2, + 30: 1, + 27: 2, + 16: 3}, + 10:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 3, + 21: 2, + 10: 2, + 19: 2, + 13: 3, + 2: 3, + 24: 1, + 20: 3, + 4: 3, + 23: 0, + 11: 3, + 8: 3, + 12: 2, + 5: 3, + 1: 3, + 15: 3, + 9: 3, + 7: 3, + 6: 3, + 14: 3, + 39: 1, + 26: 2, + 28: 2, + 22: 2, + 25: 3, + 29: 2, + 54: 2, + 18: 3, + 17: 3, + 30: 1, + 27: 1, + 16: 3}, + 19:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 3, + 21: 1, + 10: 2, + 19: 1, + 13: 3, + 2: 3, + 24: 0, + 20: 1, + 4: 3, + 23: 0, + 11: 2, + 8: 3, + 12: 2, + 5: 3, + 1: 3, + 15: 0, + 9: 3, + 7: 2, + 6: 2, + 14: 3, + 39: 1, + 26: 1, + 28: 1, + 22: 2, + 25: 1, + 29: 0, + 54: 0, + 18: 1, + 17: 1, + 30: 1, + 27: 1, + 16: 0}, + 13:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 3, + 21: 2, + 10: 3, + 19: 2, + 13: 2, + 2: 3, + 24: 2, + 20: 2, + 4: 3, + 23: 0, + 11: 3, + 8: 3, + 12: 2, + 5: 3, + 1: 3, + 15: 2, + 9: 3, + 7: 3, + 6: 3, + 14: 3, + 39: 1, + 26: 2, + 28: 3, + 22: 2, + 25: 2, + 29: 1, + 54: 2, + 18: 3, + 17: 3, + 30: 1, + 27: 2, + 16: 3}, + 2:{ + 37: 0, + 44: 0, + 33: 
0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 2, + 21: 3, + 10: 3, + 19: 3, + 13: 3, + 2: 3, + 24: 3, + 20: 3, + 4: 2, + 23: 3, + 11: 3, + 8: 3, + 12: 3, + 5: 3, + 1: 3, + 15: 3, + 9: 3, + 7: 3, + 6: 3, + 14: 2, + 39: 2, + 26: 3, + 28: 3, + 22: 3, + 25: 3, + 29: 3, + 54: 0, + 18: 0, + 17: 0, + 30: 1, + 27: 2, + 16: 3}, + 24:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 3, + 21: 2, + 10: 1, + 19: 2, + 13: 3, + 2: 3, + 24: 2, + 20: 1, + 4: 3, + 23: 0, + 11: 2, + 8: 2, + 12: 1, + 5: 3, + 1: 2, + 15: 1, + 9: 2, + 7: 2, + 6: 1, + 14: 3, + 39: 1, + 26: 0, + 28: 1, + 22: 2, + 25: 0, + 29: 0, + 54: 0, + 18: 1, + 17: 2, + 30: 1, + 27: 1, + 16: 1}, + 20:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 3, + 21: 3, + 10: 3, + 19: 3, + 13: 3, + 2: 3, + 24: 2, + 20: 2, + 4: 3, + 23: 0, + 11: 3, + 8: 3, + 12: 3, + 5: 3, + 1: 3, + 15: 0, + 9: 3, + 7: 2, + 6: 2, + 14: 3, + 39: 0, + 26: 0, + 28: 1, + 22: 2, + 25: 1, + 29: 0, + 54: 2, + 18: 3, + 17: 2, + 30: 1, + 27: 1, + 16: 3}, + 4:{ + 37: 1, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 1, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 3, + 21: 3, + 10: 3, + 19: 3, + 13: 3, + 
2: 3, + 24: 3, + 20: 3, + 4: 3, + 23: 3, + 11: 3, + 8: 3, + 12: 3, + 5: 3, + 1: 3, + 15: 3, + 9: 3, + 7: 3, + 6: 3, + 14: 2, + 39: 2, + 26: 3, + 28: 3, + 22: 3, + 25: 3, + 29: 3, + 54: 0, + 18: 0, + 17: 0, + 30: 2, + 27: 3, + 16: 3}, + 23:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 1, + 21: 1, + 10: 1, + 19: 2, + 13: 3, + 2: 2, + 24: 0, + 20: 2, + 4: 1, + 23: 0, + 11: 2, + 8: 2, + 12: 2, + 5: 3, + 1: 2, + 15: 1, + 9: 2, + 7: 3, + 6: 3, + 14: 1, + 39: 2, + 26: 1, + 28: 2, + 22: 3, + 25: 2, + 29: 1, + 54: 0, + 18: 0, + 17: 0, + 30: 1, + 27: 1, + 16: 2}, + 11:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 3, + 21: 1, + 10: 3, + 19: 1, + 13: 1, + 2: 3, + 24: 2, + 20: 2, + 4: 3, + 23: 0, + 11: 2, + 8: 3, + 12: 1, + 5: 3, + 1: 3, + 15: 0, + 9: 3, + 7: 3, + 6: 3, + 14: 3, + 39: 1, + 26: 2, + 28: 2, + 22: 1, + 25: 2, + 29: 0, + 54: 0, + 18: 1, + 17: 1, + 30: 1, + 27: 1, + 16: 1}, + 8:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 3, + 21: 2, + 10: 2, + 19: 3, + 13: 2, + 2: 3, + 24: 3, + 20: 2, + 4: 3, + 23: 0, + 11: 3, + 8: 3, + 12: 2, + 5: 3, + 1: 3, + 15: 2, + 9: 1, + 7: 3, + 6: 2, + 14: 3, + 39: 2, + 26: 2, + 28: 1, + 22: 3, + 25: 2, + 29: 1, + 54: 0, + 18: 3, + 17: 3, + 30: 1, + 27: 3, + 16: 3}, + 12:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 
0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 3, + 21: 2, + 10: 2, + 19: 2, + 13: 1, + 2: 3, + 24: 1, + 20: 1, + 4: 3, + 23: 0, + 11: 2, + 8: 3, + 12: 2, + 5: 3, + 1: 3, + 15: 2, + 9: 2, + 7: 3, + 6: 2, + 14: 3, + 39: 2, + 26: 2, + 28: 2, + 22: 2, + 25: 1, + 29: 1, + 54: 0, + 18: 3, + 17: 2, + 30: 2, + 27: 1, + 16: 3}, + 5:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 3, + 21: 2, + 10: 2, + 19: 3, + 13: 3, + 2: 3, + 24: 2, + 20: 2, + 4: 3, + 23: 0, + 11: 3, + 8: 2, + 12: 1, + 5: 3, + 1: 3, + 15: 1, + 9: 2, + 7: 3, + 6: 3, + 14: 3, + 39: 2, + 26: 2, + 28: 3, + 22: 3, + 25: 2, + 29: 2, + 54: 1, + 18: 3, + 17: 3, + 30: 1, + 27: 3, + 16: 3}, + 1:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 2, + 21: 3, + 10: 3, + 19: 3, + 13: 3, + 2: 3, + 24: 3, + 20: 3, + 4: 3, + 23: 3, + 11: 3, + 8: 3, + 12: 3, + 5: 3, + 1: 3, + 15: 3, + 9: 3, + 7: 3, + 6: 3, + 14: 2, + 39: 2, + 26: 3, + 28: 2, + 22: 3, + 25: 3, + 29: 3, + 54: 0, + 18: 0, + 17: 0, + 30: 2, + 27: 3, + 16: 3}, + 15:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 3, + 21: 1, + 10: 0, + 19: 0, + 13: 0, + 2: 3, + 24: 0, + 20: 0, + 4: 3, + 23: 
0, + 11: 2, + 8: 3, + 12: 1, + 5: 3, + 1: 3, + 15: 2, + 9: 3, + 7: 2, + 6: 2, + 14: 3, + 39: 1, + 26: 0, + 28: 2, + 22: 2, + 25: 1, + 29: 1, + 54: 0, + 18: 3, + 17: 2, + 30: 1, + 27: 1, + 16: 3}, + 9:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 3, + 21: 2, + 10: 3, + 19: 3, + 13: 3, + 2: 3, + 24: 3, + 20: 2, + 4: 3, + 23: 0, + 11: 3, + 8: 2, + 12: 3, + 5: 3, + 1: 3, + 15: 2, + 9: 2, + 7: 3, + 6: 3, + 14: 3, + 39: 2, + 26: 3, + 28: 2, + 22: 2, + 25: 3, + 29: 2, + 54: 0, + 18: 3, + 17: 3, + 30: 2, + 27: 2, + 16: 3}, + 7:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 1, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 3, + 21: 2, + 10: 3, + 19: 2, + 13: 3, + 2: 3, + 24: 2, + 20: 2, + 4: 3, + 23: 0, + 11: 3, + 8: 3, + 12: 3, + 5: 3, + 1: 3, + 15: 3, + 9: 3, + 7: 3, + 6: 3, + 14: 3, + 39: 2, + 26: 3, + 28: 2, + 22: 3, + 25: 2, + 29: 1, + 54: 2, + 18: 3, + 17: 3, + 30: 2, + 27: 3, + 16: 3}, + 6:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 3, + 21: 2, + 10: 3, + 19: 2, + 13: 2, + 2: 3, + 24: 1, + 20: 1, + 4: 3, + 23: 0, + 11: 3, + 8: 3, + 12: 2, + 5: 3, + 1: 3, + 15: 2, + 9: 3, + 7: 3, + 6: 2, + 14: 3, + 39: 2, + 26: 2, + 28: 2, + 22: 2, + 25: 2, + 29: 2, + 54: 2, + 18: 3, + 17: 3, + 30: 2, + 27: 2, + 16: 3}, + 14:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 
49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 2, + 21: 3, + 10: 3, + 19: 3, + 13: 3, + 2: 3, + 24: 3, + 20: 3, + 4: 2, + 23: 2, + 11: 3, + 8: 3, + 12: 3, + 5: 3, + 1: 2, + 15: 3, + 9: 3, + 7: 3, + 6: 3, + 14: 1, + 39: 2, + 26: 3, + 28: 2, + 22: 3, + 25: 3, + 29: 3, + 54: 0, + 18: 0, + 17: 0, + 30: 2, + 27: 3, + 16: 2}, + 39:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 3, + 21: 1, + 10: 0, + 19: 1, + 13: 0, + 2: 3, + 24: 0, + 20: 0, + 4: 3, + 23: 0, + 11: 1, + 8: 2, + 12: 1, + 5: 1, + 1: 3, + 15: 1, + 9: 2, + 7: 2, + 6: 2, + 14: 2, + 39: 2, + 26: 0, + 28: 0, + 22: 1, + 25: 1, + 29: 0, + 54: 0, + 18: 2, + 17: 1, + 30: 2, + 27: 1, + 16: 1}, + 26:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 3, + 21: 0, + 10: 3, + 19: 1, + 13: 1, + 2: 2, + 24: 0, + 20: 1, + 4: 3, + 23: 0, + 11: 1, + 8: 2, + 12: 2, + 5: 3, + 1: 3, + 15: 1, + 9: 3, + 7: 2, + 6: 2, + 14: 2, + 39: 1, + 26: 1, + 28: 1, + 22: 1, + 25: 2, + 29: 0, + 54: 1, + 18: 0, + 17: 1, + 30: 1, + 27: 1, + 16: 0}, + 28:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 3, + 21: 1, + 10: 2, + 19: 1, + 13: 1, + 2: 3, + 24: 0, + 20: 1, + 4: 3, + 23: 0, + 11: 2, + 8: 1, + 12: 1, + 5: 1, + 
1: 3, + 15: 0, + 9: 1, + 7: 0, + 6: 1, + 14: 3, + 39: 0, + 26: 0, + 28: 1, + 22: 0, + 25: 1, + 29: 0, + 54: 0, + 18: 3, + 17: 1, + 30: 0, + 27: 1, + 16: 0}, + 22:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 3, + 21: 1, + 10: 1, + 19: 0, + 13: 0, + 2: 3, + 24: 1, + 20: 0, + 4: 3, + 23: 0, + 11: 3, + 8: 2, + 12: 1, + 5: 3, + 1: 2, + 15: 0, + 9: 2, + 7: 1, + 6: 3, + 14: 3, + 39: 1, + 26: 1, + 28: 0, + 22: 1, + 25: 2, + 29: 0, + 54: 0, + 18: 0, + 17: 3, + 30: 0, + 27: 0, + 16: 0}, + 25:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 3, + 21: 1, + 10: 2, + 19: 1, + 13: 0, + 2: 3, + 24: 0, + 20: 0, + 4: 3, + 23: 0, + 11: 3, + 8: 3, + 12: 2, + 5: 3, + 1: 3, + 15: 2, + 9: 2, + 7: 1, + 6: 2, + 14: 3, + 39: 2, + 26: 1, + 28: 1, + 22: 1, + 25: 1, + 29: 0, + 54: 0, + 18: 0, + 17: 3, + 30: 1, + 27: 1, + 16: 0}, + 29:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 3, + 21: 0, + 10: 1, + 19: 0, + 13: 0, + 2: 3, + 24: 0, + 20: 0, + 4: 3, + 23: 0, + 11: 0, + 8: 0, + 12: 1, + 5: 2, + 1: 1, + 15: 0, + 9: 2, + 7: 0, + 6: 0, + 14: 2, + 39: 0, + 26: 0, + 28: 0, + 22: 0, + 25: 0, + 29: 0, + 54: 0, + 18: 0, + 17: 2, + 30: 0, + 27: 0, + 16: 0}, + 54:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 
35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 0, + 21: 0, + 10: 0, + 19: 0, + 13: 0, + 2: 2, + 24: 0, + 20: 0, + 4: 0, + 23: 0, + 11: 0, + 8: 0, + 12: 0, + 5: 0, + 1: 0, + 15: 0, + 9: 0, + 7: 0, + 6: 0, + 14: 0, + 39: 0, + 26: 0, + 28: 0, + 22: 0, + 25: 0, + 29: 0, + 54: 0, + 18: 0, + 17: 0, + 30: 0, + 27: 1, + 16: 2}, + 18:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 0, + 21: 3, + 10: 3, + 19: 2, + 13: 2, + 2: 3, + 24: 2, + 20: 2, + 4: 2, + 23: 3, + 11: 3, + 8: 3, + 12: 3, + 5: 3, + 1: 1, + 15: 3, + 9: 3, + 7: 3, + 6: 3, + 14: 1, + 39: 0, + 26: 3, + 28: 2, + 22: 3, + 25: 3, + 29: 2, + 54: 0, + 18: 0, + 17: 0, + 30: 0, + 27: 0, + 16: 2}, + 17:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 0, + 21: 2, + 10: 2, + 19: 2, + 13: 2, + 2: 3, + 24: 1, + 20: 3, + 4: 2, + 23: 0, + 11: 3, + 8: 0, + 12: 3, + 5: 3, + 1: 2, + 15: 2, + 9: 1, + 7: 3, + 6: 2, + 14: 0, + 39: 2, + 26: 1, + 28: 2, + 22: 2, + 25: 3, + 29: 2, + 54: 0, + 18: 0, + 17: 0, + 30: 1, + 27: 3, + 16: 3}, + 30:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 1, + 31: 1, + 34: 0, + 35: 0, + 45: 1, + 32: 1, + 40: 0, + 52: 0, + 53: 1, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 0, + 21: 1, + 10: 1, + 19: 1, + 13: 2, + 2: 1, + 24: 0, + 20: 1, + 4: 0, + 23: 2, + 11: 2, + 8: 2, + 12: 2, + 5: 2, + 1: 0, + 15: 2, + 9: 2, + 7: 2, + 6: 
3, + 14: 1, + 39: 2, + 26: 1, + 28: 0, + 22: 0, + 25: 1, + 29: 0, + 54: 0, + 18: 0, + 17: 0, + 30: 1, + 27: 1, + 16: 1}, + 27:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 2, + 21: 3, + 10: 1, + 19: 2, + 13: 3, + 2: 1, + 24: 2, + 20: 2, + 4: 1, + 23: 1, + 11: 2, + 8: 2, + 12: 2, + 5: 2, + 1: 1, + 15: 2, + 9: 2, + 7: 3, + 6: 3, + 14: 0, + 39: 1, + 26: 2, + 28: 2, + 22: 2, + 25: 2, + 29: 3, + 54: 0, + 18: 0, + 17: 0, + 30: 1, + 27: 2, + 16: 1}, + 16:{ + 37: 0, + 44: 0, + 33: 0, + 46: 0, + 41: 0, + 48: 0, + 56: 0, + 51: 0, + 42: 0, + 60: 0, + 36: 0, + 49: 0, + 38: 0, + 31: 0, + 34: 0, + 35: 0, + 45: 0, + 32: 0, + 40: 0, + 52: 0, + 53: 0, + 55: 0, + 58: 0, + 50: 0, + 57: 0, + 63: 0, + 62: 0, + 61: 0, + 47: 0, + 59: 0, + 43: 0, + 3: 0, + 21: 2, + 10: 3, + 19: 2, + 13: 3, + 2: 3, + 24: 3, + 20: 3, + 4: 2, + 23: 2, + 11: 3, + 8: 3, + 12: 3, + 5: 3, + 1: 0, + 15: 2, + 9: 2, + 7: 3, + 6: 3, + 14: 1, + 39: 1, + 26: 3, + 28: 2, + 22: 2, + 25: 2, + 29: 3, + 54: 0, + 18: 0, + 17: 0, + 30: 0, + 27: 2, + 16: 2}} +IBM866_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, + 1: 255, + 2: 255, + 3: 255, + 4: 255, + 5: 255, + 6: 255, + 7: 255, + 8: 255, + 9: 255, + 10: 254, + 11: 255, + 12: 255, + 13: 254, + 14: 255, + 15: 255, + 16: 255, + 17: 255, + 18: 255, + 19: 255, + 20: 255, + 21: 255, + 22: 255, + 23: 255, + 24: 255, + 25: 255, + 26: 255, + 27: 255, + 28: 255, + 29: 255, + 30: 255, + 31: 255, + 32: 253, + 33: 253, + 34: 253, + 35: 253, + 36: 253, + 37: 253, + 38: 253, + 39: 253, + 40: 253, + 41: 253, + 42: 253, + 43: 253, + 44: 253, + 45: 253, + 46: 253, + 47: 253, + 48: 252, + 49: 252, + 50: 252, + 51: 252, + 52: 252, + 53: 252, + 54: 252, + 55: 252, + 56: 252, + 57: 252, + 58: 253, + 59: 253, + 60: 253, + 61: 253, + 62: 253, + 63: 253, + 64: 253, + 65: 
142, + 66: 143, + 67: 144, + 68: 145, + 69: 146, + 70: 147, + 71: 148, + 72: 149, + 73: 150, + 74: 151, + 75: 152, + 76: 74, + 77: 153, + 78: 75, + 79: 154, + 80: 155, + 81: 156, + 82: 157, + 83: 158, + 84: 159, + 85: 160, + 86: 161, + 87: 162, + 88: 163, + 89: 164, + 90: 165, + 91: 253, + 92: 253, + 93: 253, + 94: 253, + 95: 253, + 96: 253, + 97: 71, + 98: 172, + 99: 66, + 100: 173, + 101: 65, + 102: 174, + 103: 76, + 104: 175, + 105: 64, + 106: 176, + 107: 177, + 108: 77, + 109: 72, + 110: 178, + 111: 69, + 112: 67, + 113: 179, + 114: 78, + 115: 73, + 116: 180, + 117: 181, + 118: 79, + 119: 182, + 120: 183, + 121: 184, + 122: 185, + 123: 253, + 124: 253, + 125: 253, + 126: 253, + 127: 253, + 128: 37, + 129: 44, + 130: 33, + 131: 46, + 132: 41, + 133: 48, + 134: 56, + 135: 51, + 136: 42, + 137: 60, + 138: 36, + 139: 49, + 140: 38, + 141: 31, + 142: 34, + 143: 35, + 144: 45, + 145: 32, + 146: 40, + 147: 52, + 148: 53, + 149: 55, + 150: 58, + 151: 50, + 152: 57, + 153: 63, + 154: 70, + 155: 62, + 156: 61, + 157: 47, + 158: 59, + 159: 43, + 160: 3, + 161: 21, + 162: 10, + 163: 19, + 164: 13, + 165: 2, + 166: 24, + 167: 20, + 168: 4, + 169: 23, + 170: 11, + 171: 8, + 172: 12, + 173: 5, + 174: 1, + 175: 15, + 176: 191, + 177: 192, + 178: 193, + 179: 194, + 180: 195, + 181: 196, + 182: 197, + 183: 198, + 184: 199, + 185: 200, + 186: 201, + 187: 202, + 188: 203, + 189: 204, + 190: 205, + 191: 206, + 192: 207, + 193: 208, + 194: 209, + 195: 210, + 196: 211, + 197: 212, + 198: 213, + 199: 214, + 200: 215, + 201: 216, + 202: 217, + 203: 218, + 204: 219, + 205: 220, + 206: 221, + 207: 222, + 208: 223, + 209: 224, + 210: 225, + 211: 226, + 212: 227, + 213: 228, + 214: 229, + 215: 230, + 216: 231, + 217: 232, + 218: 233, + 219: 234, + 220: 235, + 221: 236, + 222: 237, + 223: 238, + 224: 9, + 225: 7, + 226: 6, + 227: 14, + 228: 39, + 229: 26, + 230: 28, + 231: 22, + 232: 25, + 233: 29, + 234: 54, + 235: 18, + 236: 17, + 237: 30, + 238: 27, + 239: 16, + 240: 239, + 241: 68, + 
242: 240, + 243: 241, + 244: 242, + 245: 243, + 246: 244, + 247: 245, + 248: 246, + 249: 247, + 250: 248, + 251: 249, + 252: 250, + 253: 251, + 254: 252, + 255: 255} +IBM866_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name="IBM866", language="Russian", + char_to_order_map=IBM866_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet="ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё") +WINDOWS_1251_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, + 1: 255, + 2: 255, + 3: 255, + 4: 255, + 5: 255, + 6: 255, + 7: 255, + 8: 255, + 9: 255, + 10: 254, + 11: 255, + 12: 255, + 13: 254, + 14: 255, + 15: 255, + 16: 255, + 17: 255, + 18: 255, + 19: 255, + 20: 255, + 21: 255, + 22: 255, + 23: 255, + 24: 255, + 25: 255, + 26: 255, + 27: 255, + 28: 255, + 29: 255, + 30: 255, + 31: 255, + 32: 253, + 33: 253, + 34: 253, + 35: 253, + 36: 253, + 37: 253, + 38: 253, + 39: 253, + 40: 253, + 41: 253, + 42: 253, + 43: 253, + 44: 253, + 45: 253, + 46: 253, + 47: 253, + 48: 252, + 49: 252, + 50: 252, + 51: 252, + 52: 252, + 53: 252, + 54: 252, + 55: 252, + 56: 252, + 57: 252, + 58: 253, + 59: 253, + 60: 253, + 61: 253, + 62: 253, + 63: 253, + 64: 253, + 65: 142, + 66: 143, + 67: 144, + 68: 145, + 69: 146, + 70: 147, + 71: 148, + 72: 149, + 73: 150, + 74: 151, + 75: 152, + 76: 74, + 77: 153, + 78: 75, + 79: 154, + 80: 155, + 81: 156, + 82: 157, + 83: 158, + 84: 159, + 85: 160, + 86: 161, + 87: 162, + 88: 163, + 89: 164, + 90: 165, + 91: 253, + 92: 253, + 93: 253, + 94: 253, + 95: 253, + 96: 253, + 97: 71, + 98: 172, + 99: 66, + 100: 173, + 101: 65, + 102: 174, + 103: 76, + 104: 175, + 105: 64, + 106: 176, + 107: 177, + 108: 77, + 109: 72, + 110: 178, + 111: 69, + 112: 67, + 113: 179, + 114: 78, + 115: 73, + 116: 180, + 117: 181, + 118: 79, + 119: 182, + 120: 183, + 121: 184, + 122: 185, + 123: 253, + 124: 253, + 125: 253, + 126: 253, + 127: 253, + 128: 191, + 129: 192, + 130: 193, + 131: 194, + 132: 195, 
+ 133: 196, + 134: 197, + 135: 198, + 136: 199, + 137: 200, + 138: 201, + 139: 202, + 140: 203, + 141: 204, + 142: 205, + 143: 206, + 144: 207, + 145: 208, + 146: 209, + 147: 210, + 148: 211, + 149: 212, + 150: 213, + 151: 214, + 152: 215, + 153: 216, + 154: 217, + 155: 218, + 156: 219, + 157: 220, + 158: 221, + 159: 222, + 160: 223, + 161: 224, + 162: 225, + 163: 226, + 164: 227, + 165: 228, + 166: 229, + 167: 230, + 168: 231, + 169: 232, + 170: 233, + 171: 234, + 172: 235, + 173: 236, + 174: 237, + 175: 238, + 176: 239, + 177: 240, + 178: 241, + 179: 242, + 180: 243, + 181: 244, + 182: 245, + 183: 246, + 184: 68, + 185: 247, + 186: 248, + 187: 249, + 188: 250, + 189: 251, + 190: 252, + 191: 253, + 192: 37, + 193: 44, + 194: 33, + 195: 46, + 196: 41, + 197: 48, + 198: 56, + 199: 51, + 200: 42, + 201: 60, + 202: 36, + 203: 49, + 204: 38, + 205: 31, + 206: 34, + 207: 35, + 208: 45, + 209: 32, + 210: 40, + 211: 52, + 212: 53, + 213: 55, + 214: 58, + 215: 50, + 216: 57, + 217: 63, + 218: 70, + 219: 62, + 220: 61, + 221: 47, + 222: 59, + 223: 43, + 224: 3, + 225: 21, + 226: 10, + 227: 19, + 228: 13, + 229: 2, + 230: 24, + 231: 20, + 232: 4, + 233: 23, + 234: 11, + 235: 8, + 236: 12, + 237: 5, + 238: 1, + 239: 15, + 240: 9, + 241: 7, + 242: 6, + 243: 14, + 244: 39, + 245: 26, + 246: 28, + 247: 22, + 248: 25, + 249: 29, + 250: 54, + 251: 18, + 252: 17, + 253: 30, + 254: 27, + 255: 16} +WINDOWS_1251_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name="windows-1251", language="Russian", + char_to_order_map=WINDOWS_1251_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet="ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё") +IBM855_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, + 1: 255, + 2: 255, + 3: 255, + 4: 255, + 5: 255, + 6: 255, + 7: 255, + 8: 255, + 9: 255, + 10: 254, + 11: 255, + 12: 255, + 13: 254, + 14: 255, + 15: 255, + 16: 255, + 17: 255, + 18: 255, + 19: 255, + 20: 255, + 
21: 255, + 22: 255, + 23: 255, + 24: 255, + 25: 255, + 26: 255, + 27: 255, + 28: 255, + 29: 255, + 30: 255, + 31: 255, + 32: 253, + 33: 253, + 34: 253, + 35: 253, + 36: 253, + 37: 253, + 38: 253, + 39: 253, + 40: 253, + 41: 253, + 42: 253, + 43: 253, + 44: 253, + 45: 253, + 46: 253, + 47: 253, + 48: 252, + 49: 252, + 50: 252, + 51: 252, + 52: 252, + 53: 252, + 54: 252, + 55: 252, + 56: 252, + 57: 252, + 58: 253, + 59: 253, + 60: 253, + 61: 253, + 62: 253, + 63: 253, + 64: 253, + 65: 142, + 66: 143, + 67: 144, + 68: 145, + 69: 146, + 70: 147, + 71: 148, + 72: 149, + 73: 150, + 74: 151, + 75: 152, + 76: 74, + 77: 153, + 78: 75, + 79: 154, + 80: 155, + 81: 156, + 82: 157, + 83: 158, + 84: 159, + 85: 160, + 86: 161, + 87: 162, + 88: 163, + 89: 164, + 90: 165, + 91: 253, + 92: 253, + 93: 253, + 94: 253, + 95: 253, + 96: 253, + 97: 71, + 98: 172, + 99: 66, + 100: 173, + 101: 65, + 102: 174, + 103: 76, + 104: 175, + 105: 64, + 106: 176, + 107: 177, + 108: 77, + 109: 72, + 110: 178, + 111: 69, + 112: 67, + 113: 179, + 114: 78, + 115: 73, + 116: 180, + 117: 181, + 118: 79, + 119: 182, + 120: 183, + 121: 184, + 122: 185, + 123: 253, + 124: 253, + 125: 253, + 126: 253, + 127: 253, + 128: 191, + 129: 192, + 130: 193, + 131: 194, + 132: 68, + 133: 195, + 134: 196, + 135: 197, + 136: 198, + 137: 199, + 138: 200, + 139: 201, + 140: 202, + 141: 203, + 142: 204, + 143: 205, + 144: 206, + 145: 207, + 146: 208, + 147: 209, + 148: 210, + 149: 211, + 150: 212, + 151: 213, + 152: 214, + 153: 215, + 154: 216, + 155: 217, + 156: 27, + 157: 59, + 158: 54, + 159: 70, + 160: 3, + 161: 37, + 162: 21, + 163: 44, + 164: 28, + 165: 58, + 166: 13, + 167: 41, + 168: 2, + 169: 48, + 170: 39, + 171: 53, + 172: 19, + 173: 46, + 174: 218, + 175: 219, + 176: 220, + 177: 221, + 178: 222, + 179: 223, + 180: 224, + 181: 26, + 182: 55, + 183: 4, + 184: 42, + 185: 225, + 186: 226, + 187: 227, + 188: 228, + 189: 23, + 190: 60, + 191: 229, + 192: 230, + 193: 231, + 194: 232, + 195: 233, + 196: 234, + 197: 
235, + 198: 11, + 199: 36, + 200: 236, + 201: 237, + 202: 238, + 203: 239, + 204: 240, + 205: 241, + 206: 242, + 207: 243, + 208: 8, + 209: 49, + 210: 12, + 211: 38, + 212: 5, + 213: 31, + 214: 1, + 215: 34, + 216: 15, + 217: 244, + 218: 245, + 219: 246, + 220: 247, + 221: 35, + 222: 16, + 223: 248, + 224: 43, + 225: 9, + 226: 45, + 227: 7, + 228: 32, + 229: 6, + 230: 40, + 231: 14, + 232: 52, + 233: 24, + 234: 56, + 235: 10, + 236: 33, + 237: 17, + 238: 61, + 239: 249, + 240: 250, + 241: 18, + 242: 62, + 243: 20, + 244: 51, + 245: 25, + 246: 57, + 247: 30, + 248: 47, + 249: 29, + 250: 63, + 251: 22, + 252: 50, + 253: 251, + 254: 252, + 255: 255} +IBM855_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name="IBM855", language="Russian", + char_to_order_map=IBM855_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet="ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё") +KOI8_R_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, + 1: 255, + 2: 255, + 3: 255, + 4: 255, + 5: 255, + 6: 255, + 7: 255, + 8: 255, + 9: 255, + 10: 254, + 11: 255, + 12: 255, + 13: 254, + 14: 255, + 15: 255, + 16: 255, + 17: 255, + 18: 255, + 19: 255, + 20: 255, + 21: 255, + 22: 255, + 23: 255, + 24: 255, + 25: 255, + 26: 255, + 27: 255, + 28: 255, + 29: 255, + 30: 255, + 31: 255, + 32: 253, + 33: 253, + 34: 253, + 35: 253, + 36: 253, + 37: 253, + 38: 253, + 39: 253, + 40: 253, + 41: 253, + 42: 253, + 43: 253, + 44: 253, + 45: 253, + 46: 253, + 47: 253, + 48: 252, + 49: 252, + 50: 252, + 51: 252, + 52: 252, + 53: 252, + 54: 252, + 55: 252, + 56: 252, + 57: 252, + 58: 253, + 59: 253, + 60: 253, + 61: 253, + 62: 253, + 63: 253, + 64: 253, + 65: 142, + 66: 143, + 67: 144, + 68: 145, + 69: 146, + 70: 147, + 71: 148, + 72: 149, + 73: 150, + 74: 151, + 75: 152, + 76: 74, + 77: 153, + 78: 75, + 79: 154, + 80: 155, + 81: 156, + 82: 157, + 83: 158, + 84: 159, + 85: 160, + 86: 161, + 87: 162, + 88: 163, + 89: 164, + 90: 165, 
+ 91: 253, + 92: 253, + 93: 253, + 94: 253, + 95: 253, + 96: 253, + 97: 71, + 98: 172, + 99: 66, + 100: 173, + 101: 65, + 102: 174, + 103: 76, + 104: 175, + 105: 64, + 106: 176, + 107: 177, + 108: 77, + 109: 72, + 110: 178, + 111: 69, + 112: 67, + 113: 179, + 114: 78, + 115: 73, + 116: 180, + 117: 181, + 118: 79, + 119: 182, + 120: 183, + 121: 184, + 122: 185, + 123: 253, + 124: 253, + 125: 253, + 126: 253, + 127: 253, + 128: 191, + 129: 192, + 130: 193, + 131: 194, + 132: 195, + 133: 196, + 134: 197, + 135: 198, + 136: 199, + 137: 200, + 138: 201, + 139: 202, + 140: 203, + 141: 204, + 142: 205, + 143: 206, + 144: 207, + 145: 208, + 146: 209, + 147: 210, + 148: 211, + 149: 212, + 150: 213, + 151: 214, + 152: 215, + 153: 216, + 154: 217, + 155: 218, + 156: 219, + 157: 220, + 158: 221, + 159: 222, + 160: 223, + 161: 224, + 162: 225, + 163: 68, + 164: 226, + 165: 227, + 166: 228, + 167: 229, + 168: 230, + 169: 231, + 170: 232, + 171: 233, + 172: 234, + 173: 235, + 174: 236, + 175: 237, + 176: 238, + 177: 239, + 178: 240, + 179: 241, + 180: 242, + 181: 243, + 182: 244, + 183: 245, + 184: 246, + 185: 247, + 186: 248, + 187: 249, + 188: 250, + 189: 251, + 190: 252, + 191: 253, + 192: 27, + 193: 3, + 194: 21, + 195: 28, + 196: 13, + 197: 2, + 198: 39, + 199: 19, + 200: 26, + 201: 4, + 202: 23, + 203: 11, + 204: 8, + 205: 12, + 206: 5, + 207: 1, + 208: 15, + 209: 16, + 210: 9, + 211: 7, + 212: 6, + 213: 14, + 214: 24, + 215: 10, + 216: 17, + 217: 18, + 218: 20, + 219: 25, + 220: 30, + 221: 29, + 222: 22, + 223: 54, + 224: 59, + 225: 37, + 226: 44, + 227: 58, + 228: 41, + 229: 48, + 230: 53, + 231: 46, + 232: 55, + 233: 42, + 234: 60, + 235: 36, + 236: 49, + 237: 38, + 238: 31, + 239: 34, + 240: 35, + 241: 43, + 242: 45, + 243: 32, + 244: 40, + 245: 52, + 246: 56, + 247: 33, + 248: 61, + 249: 62, + 250: 51, + 251: 57, + 252: 47, + 253: 63, + 254: 50, + 255: 70} +KOI8_R_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name="KOI8-R", language="Russian", + 
char_to_order_map=KOI8_R_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet="ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё") +MACCYRILLIC_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, + 1: 255, + 2: 255, + 3: 255, + 4: 255, + 5: 255, + 6: 255, + 7: 255, + 8: 255, + 9: 255, + 10: 254, + 11: 255, + 12: 255, + 13: 254, + 14: 255, + 15: 255, + 16: 255, + 17: 255, + 18: 255, + 19: 255, + 20: 255, + 21: 255, + 22: 255, + 23: 255, + 24: 255, + 25: 255, + 26: 255, + 27: 255, + 28: 255, + 29: 255, + 30: 255, + 31: 255, + 32: 253, + 33: 253, + 34: 253, + 35: 253, + 36: 253, + 37: 253, + 38: 253, + 39: 253, + 40: 253, + 41: 253, + 42: 253, + 43: 253, + 44: 253, + 45: 253, + 46: 253, + 47: 253, + 48: 252, + 49: 252, + 50: 252, + 51: 252, + 52: 252, + 53: 252, + 54: 252, + 55: 252, + 56: 252, + 57: 252, + 58: 253, + 59: 253, + 60: 253, + 61: 253, + 62: 253, + 63: 253, + 64: 253, + 65: 142, + 66: 143, + 67: 144, + 68: 145, + 69: 146, + 70: 147, + 71: 148, + 72: 149, + 73: 150, + 74: 151, + 75: 152, + 76: 74, + 77: 153, + 78: 75, + 79: 154, + 80: 155, + 81: 156, + 82: 157, + 83: 158, + 84: 159, + 85: 160, + 86: 161, + 87: 162, + 88: 163, + 89: 164, + 90: 165, + 91: 253, + 92: 253, + 93: 253, + 94: 253, + 95: 253, + 96: 253, + 97: 71, + 98: 172, + 99: 66, + 100: 173, + 101: 65, + 102: 174, + 103: 76, + 104: 175, + 105: 64, + 106: 176, + 107: 177, + 108: 77, + 109: 72, + 110: 178, + 111: 69, + 112: 67, + 113: 179, + 114: 78, + 115: 73, + 116: 180, + 117: 181, + 118: 79, + 119: 182, + 120: 183, + 121: 184, + 122: 185, + 123: 253, + 124: 253, + 125: 253, + 126: 253, + 127: 253, + 128: 37, + 129: 44, + 130: 33, + 131: 46, + 132: 41, + 133: 48, + 134: 56, + 135: 51, + 136: 42, + 137: 60, + 138: 36, + 139: 49, + 140: 38, + 141: 31, + 142: 34, + 143: 35, + 144: 45, + 145: 32, + 146: 40, + 147: 52, + 148: 53, + 149: 55, + 150: 58, + 151: 50, + 152: 57, + 153: 63, + 154: 70, + 155: 62, + 156: 61, 
+ 157: 47, + 158: 59, + 159: 43, + 160: 191, + 161: 192, + 162: 193, + 163: 194, + 164: 195, + 165: 196, + 166: 197, + 167: 198, + 168: 199, + 169: 200, + 170: 201, + 171: 202, + 172: 203, + 173: 204, + 174: 205, + 175: 206, + 176: 207, + 177: 208, + 178: 209, + 179: 210, + 180: 211, + 181: 212, + 182: 213, + 183: 214, + 184: 215, + 185: 216, + 186: 217, + 187: 218, + 188: 219, + 189: 220, + 190: 221, + 191: 222, + 192: 223, + 193: 224, + 194: 225, + 195: 226, + 196: 227, + 197: 228, + 198: 229, + 199: 230, + 200: 231, + 201: 232, + 202: 233, + 203: 234, + 204: 235, + 205: 236, + 206: 237, + 207: 238, + 208: 239, + 209: 240, + 210: 241, + 211: 242, + 212: 243, + 213: 244, + 214: 245, + 215: 246, + 216: 247, + 217: 248, + 218: 249, + 219: 250, + 220: 251, + 221: 252, + 222: 68, + 223: 16, + 224: 3, + 225: 21, + 226: 10, + 227: 19, + 228: 13, + 229: 2, + 230: 24, + 231: 20, + 232: 4, + 233: 23, + 234: 11, + 235: 8, + 236: 12, + 237: 5, + 238: 1, + 239: 15, + 240: 9, + 241: 7, + 242: 6, + 243: 14, + 244: 39, + 245: 26, + 246: 28, + 247: 22, + 248: 25, + 249: 29, + 250: 54, + 251: 18, + 252: 17, + 253: 30, + 254: 27, + 255: 255} +MACCYRILLIC_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name="MacCyrillic", language="Russian", + char_to_order_map=MACCYRILLIC_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet="ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё") +ISO_8859_5_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, + 1: 255, + 2: 255, + 3: 255, + 4: 255, + 5: 255, + 6: 255, + 7: 255, + 8: 255, + 9: 255, + 10: 254, + 11: 255, + 12: 255, + 13: 254, + 14: 255, + 15: 255, + 16: 255, + 17: 255, + 18: 255, + 19: 255, + 20: 255, + 21: 255, + 22: 255, + 23: 255, + 24: 255, + 25: 255, + 26: 255, + 27: 255, + 28: 255, + 29: 255, + 30: 255, + 31: 255, + 32: 253, + 33: 253, + 34: 253, + 35: 253, + 36: 253, + 37: 253, + 38: 253, + 39: 253, + 40: 253, + 41: 253, + 42: 253, + 43: 253, + 44: 
253, + 45: 253, + 46: 253, + 47: 253, + 48: 252, + 49: 252, + 50: 252, + 51: 252, + 52: 252, + 53: 252, + 54: 252, + 55: 252, + 56: 252, + 57: 252, + 58: 253, + 59: 253, + 60: 253, + 61: 253, + 62: 253, + 63: 253, + 64: 253, + 65: 142, + 66: 143, + 67: 144, + 68: 145, + 69: 146, + 70: 147, + 71: 148, + 72: 149, + 73: 150, + 74: 151, + 75: 152, + 76: 74, + 77: 153, + 78: 75, + 79: 154, + 80: 155, + 81: 156, + 82: 157, + 83: 158, + 84: 159, + 85: 160, + 86: 161, + 87: 162, + 88: 163, + 89: 164, + 90: 165, + 91: 253, + 92: 253, + 93: 253, + 94: 253, + 95: 253, + 96: 253, + 97: 71, + 98: 172, + 99: 66, + 100: 173, + 101: 65, + 102: 174, + 103: 76, + 104: 175, + 105: 64, + 106: 176, + 107: 177, + 108: 77, + 109: 72, + 110: 178, + 111: 69, + 112: 67, + 113: 179, + 114: 78, + 115: 73, + 116: 180, + 117: 181, + 118: 79, + 119: 182, + 120: 183, + 121: 184, + 122: 185, + 123: 253, + 124: 253, + 125: 253, + 126: 253, + 127: 253, + 128: 191, + 129: 192, + 130: 193, + 131: 194, + 132: 195, + 133: 196, + 134: 197, + 135: 198, + 136: 199, + 137: 200, + 138: 201, + 139: 202, + 140: 203, + 141: 204, + 142: 205, + 143: 206, + 144: 207, + 145: 208, + 146: 209, + 147: 210, + 148: 211, + 149: 212, + 150: 213, + 151: 214, + 152: 215, + 153: 216, + 154: 217, + 155: 218, + 156: 219, + 157: 220, + 158: 221, + 159: 222, + 160: 223, + 161: 224, + 162: 225, + 163: 226, + 164: 227, + 165: 228, + 166: 229, + 167: 230, + 168: 231, + 169: 232, + 170: 233, + 171: 234, + 172: 235, + 173: 236, + 174: 237, + 175: 238, + 176: 37, + 177: 44, + 178: 33, + 179: 46, + 180: 41, + 181: 48, + 182: 56, + 183: 51, + 184: 42, + 185: 60, + 186: 36, + 187: 49, + 188: 38, + 189: 31, + 190: 34, + 191: 35, + 192: 45, + 193: 32, + 194: 40, + 195: 52, + 196: 53, + 197: 55, + 198: 58, + 199: 50, + 200: 57, + 201: 63, + 202: 70, + 203: 62, + 204: 61, + 205: 47, + 206: 59, + 207: 43, + 208: 3, + 209: 21, + 210: 10, + 211: 19, + 212: 13, + 213: 2, + 214: 24, + 215: 20, + 216: 4, + 217: 23, + 218: 11, + 219: 8, + 220: 12, 
+ 221: 5, + 222: 1, + 223: 15, + 224: 9, + 225: 7, + 226: 6, + 227: 14, + 228: 39, + 229: 26, + 230: 28, + 231: 22, + 232: 25, + 233: 29, + 234: 54, + 235: 18, + 236: 17, + 237: 30, + 238: 27, + 239: 16, + 240: 239, + 241: 68, + 242: 240, + 243: 241, + 244: 242, + 245: 243, + 246: 244, + 247: 245, + 248: 246, + 249: 247, + 250: 248, + 251: 249, + 252: 250, + 253: 251, + 254: 252, + 255: 255} +ISO_8859_5_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name="ISO-8859-5", language="Russian", + char_to_order_map=ISO_8859_5_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet="ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё") diff --git a/APPS_UNCOMPILED/lib/chardet/langthaimodel.py b/APPS_UNCOMPILED/lib/chardet/langthaimodel.py new file mode 100644 index 0000000..3cfe6d4 --- /dev/null +++ b/APPS_UNCOMPILED/lib/chardet/langthaimodel.py @@ -0,0 +1,4303 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/langthaimodel.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 103300 bytes +from chardet.sbcharsetprober import SingleByteCharSetModel +THAI_LANG_MODEL = {5:{ + 5: 2, + 30: 2, + 24: 2, + 8: 2, + 26: 2, + 52: 0, + 34: 1, + 51: 1, + 47: 0, + 58: 3, + 57: 2, + 49: 0, + 53: 0, + 55: 0, + 43: 2, + 20: 2, + 19: 3, + 44: 0, + 14: 2, + 48: 0, + 3: 2, + 17: 1, + 25: 2, + 39: 1, + 62: 1, + 31: 1, + 54: 0, + 45: 1, + 9: 2, + 16: 1, + 2: 3, + 61: 2, + 15: 3, + 12: 3, + 42: 2, + 46: 3, + 18: 2, + 21: 2, + 4: 3, + 63: 1, + 22: 2, + 10: 3, + 1: 3, + 36: 3, + 23: 3, + 13: 3, + 40: 0, + 27: 2, + 32: 2, + 35: 1, + 11: 2, + 28: 2, + 41: 1, + 29: 1, + 33: 2, + 50: 1, + 37: 3, + 6: 3, + 7: 3, + 38: 2, + 56: 0, + 59: 0, + 60: 0}, + 30:{ + 5: 1, + 30: 0, + 24: 1, + 8: 1, + 26: 1, + 
52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 2, + 20: 0, + 19: 2, + 44: 0, + 14: 1, + 48: 0, + 3: 2, + 17: 1, + 25: 1, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 0, + 16: 2, + 2: 1, + 61: 0, + 15: 0, + 12: 2, + 42: 0, + 46: 0, + 18: 1, + 21: 1, + 4: 3, + 63: 0, + 22: 0, + 10: 3, + 1: 3, + 36: 0, + 23: 0, + 13: 2, + 40: 3, + 27: 1, + 32: 1, + 35: 0, + 11: 0, + 28: 0, + 41: 0, + 29: 1, + 33: 0, + 50: 0, + 37: 1, + 6: 2, + 7: 3, + 38: 1, + 56: 0, + 59: 0, + 60: 0}, + 24:{ + 5: 0, + 30: 0, + 24: 2, + 8: 2, + 26: 0, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 2, + 20: 2, + 19: 2, + 44: 0, + 14: 1, + 48: 0, + 3: 3, + 17: 0, + 25: 1, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 2, + 16: 2, + 2: 3, + 61: 0, + 15: 3, + 12: 3, + 42: 0, + 46: 0, + 18: 1, + 21: 0, + 4: 2, + 63: 0, + 22: 2, + 10: 3, + 1: 2, + 36: 3, + 23: 3, + 13: 2, + 40: 0, + 27: 3, + 32: 3, + 35: 2, + 11: 1, + 28: 0, + 41: 3, + 29: 0, + 33: 0, + 50: 0, + 37: 1, + 6: 3, + 7: 3, + 38: 3, + 56: 0, + 59: 0, + 60: 0}, + 8:{ + 5: 3, + 30: 2, + 24: 3, + 8: 2, + 26: 2, + 52: 1, + 34: 2, + 51: 1, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 2, + 19: 2, + 44: 1, + 14: 3, + 48: 1, + 3: 3, + 17: 2, + 25: 2, + 39: 2, + 62: 1, + 31: 2, + 54: 0, + 45: 1, + 9: 2, + 16: 1, + 2: 2, + 61: 0, + 15: 2, + 12: 2, + 42: 2, + 46: 1, + 18: 3, + 21: 3, + 4: 2, + 63: 0, + 22: 0, + 10: 1, + 1: 3, + 36: 0, + 23: 2, + 13: 1, + 40: 0, + 27: 1, + 32: 1, + 35: 0, + 11: 3, + 28: 2, + 41: 1, + 29: 2, + 33: 2, + 50: 3, + 37: 0, + 6: 2, + 7: 0, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 26:{ + 5: 2, + 30: 1, + 24: 0, + 8: 2, + 26: 3, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 2, + 19: 1, + 44: 1, + 14: 2, + 48: 0, + 3: 3, + 17: 1, + 25: 0, + 39: 0, + 62: 0, + 31: 1, + 54: 0, + 45: 0, + 9: 1, + 16: 1, + 2: 3, + 61: 0, + 15: 0, + 12: 1, + 42: 0, + 46: 0, + 18: 2, + 21: 1, + 4: 2, + 
63: 0, + 22: 3, + 10: 3, + 1: 3, + 36: 3, + 23: 2, + 13: 1, + 40: 3, + 27: 1, + 32: 3, + 35: 2, + 11: 1, + 28: 1, + 41: 0, + 29: 1, + 33: 1, + 50: 0, + 37: 0, + 6: 2, + 7: 2, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 52:{ + 5: 0, + 30: 0, + 24: 0, + 8: 0, + 26: 0, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 0, + 19: 0, + 44: 0, + 14: 0, + 48: 0, + 3: 0, + 17: 3, + 25: 0, + 39: 0, + 62: 0, + 31: 3, + 54: 0, + 45: 0, + 9: 1, + 16: 1, + 2: 0, + 61: 0, + 15: 2, + 12: 1, + 42: 0, + 46: 0, + 18: 0, + 21: 0, + 4: 0, + 63: 0, + 22: 1, + 10: 1, + 1: 1, + 36: 0, + 23: 1, + 13: 1, + 40: 0, + 27: 0, + 32: 1, + 35: 0, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 34:{ + 5: 1, + 30: 0, + 24: 0, + 8: 1, + 26: 0, + 52: 0, + 34: 0, + 51: 0, + 47: 1, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 0, + 19: 0, + 44: 0, + 14: 1, + 48: 0, + 3: 3, + 17: 2, + 25: 0, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 2, + 16: 1, + 2: 1, + 61: 0, + 15: 0, + 12: 1, + 42: 0, + 46: 0, + 18: 0, + 21: 0, + 4: 2, + 63: 0, + 22: 0, + 10: 2, + 1: 3, + 36: 1, + 23: 3, + 13: 2, + 40: 0, + 27: 3, + 32: 3, + 35: 1, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 1, + 6: 3, + 7: 3, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 51:{ + 5: 0, + 30: 0, + 24: 0, + 8: 0, + 26: 0, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 0, + 19: 0, + 44: 0, + 14: 0, + 48: 0, + 3: 1, + 17: 0, + 25: 0, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 0, + 16: 0, + 2: 0, + 61: 0, + 15: 1, + 12: 0, + 42: 0, + 46: 0, + 18: 1, + 21: 0, + 4: 2, + 63: 0, + 22: 0, + 10: 1, + 1: 1, + 36: 0, + 23: 1, + 13: 2, + 40: 3, + 27: 2, + 32: 1, + 35: 1, + 11: 1, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 1, + 6: 1, + 7: 2, + 38: 1, + 56: 0, + 59: 0, + 60: 0}, + 47:{ + 5: 1, + 30: 1, + 24: 0, + 8: 0, + 26: 0, + 52: 0, + 34: 1, + 51: 0, + 47: 3, + 
58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 0, + 19: 0, + 44: 0, + 14: 1, + 48: 0, + 3: 0, + 17: 1, + 25: 1, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 1, + 16: 0, + 2: 0, + 61: 0, + 15: 1, + 12: 0, + 42: 0, + 46: 0, + 18: 1, + 21: 2, + 4: 1, + 63: 0, + 22: 1, + 10: 2, + 1: 3, + 36: 0, + 23: 1, + 13: 1, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 1, + 28: 1, + 41: 0, + 29: 1, + 33: 0, + 50: 1, + 37: 0, + 6: 2, + 7: 0, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 58:{ + 5: 2, + 30: 0, + 24: 0, + 8: 0, + 26: 0, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 0, + 19: 0, + 44: 0, + 14: 0, + 48: 0, + 3: 0, + 17: 0, + 25: 0, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 0, + 16: 0, + 2: 0, + 61: 0, + 15: 0, + 12: 0, + 42: 0, + 46: 0, + 18: 0, + 21: 1, + 4: 0, + 63: 0, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 1, + 13: 2, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 57:{ + 5: 0, + 30: 0, + 24: 0, + 8: 0, + 26: 0, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 0, + 19: 0, + 44: 0, + 14: 0, + 48: 0, + 3: 0, + 17: 0, + 25: 0, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 0, + 16: 0, + 2: 0, + 61: 0, + 15: 0, + 12: 0, + 42: 0, + 46: 0, + 18: 0, + 21: 0, + 4: 0, + 63: 0, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 3, + 13: 1, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 49:{ + 5: 1, + 30: 0, + 24: 0, + 8: 0, + 26: 0, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 0, + 19: 0, + 44: 0, + 14: 0, + 48: 0, + 3: 0, + 17: 2, + 25: 0, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 2, + 16: 0, + 2: 0, + 61: 0, + 15: 0, + 12: 0, + 42: 1, + 46: 0, + 18: 0, + 21: 0, + 4: 1, + 63: 0, + 22: 0, + 10: 0, + 1: 3, + 
36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 1, + 56: 0, + 59: 0, + 60: 0}, + 53:{ + 5: 0, + 30: 0, + 24: 0, + 8: 0, + 26: 0, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 0, + 19: 0, + 44: 0, + 14: 0, + 48: 0, + 3: 0, + 17: 0, + 25: 0, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 0, + 16: 0, + 2: 0, + 61: 0, + 15: 0, + 12: 0, + 42: 0, + 46: 0, + 18: 0, + 21: 0, + 4: 0, + 63: 0, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 2, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 3, + 56: 0, + 59: 0, + 60: 0}, + 55:{ + 5: 0, + 30: 0, + 24: 0, + 8: 0, + 26: 0, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 0, + 19: 0, + 44: 0, + 14: 0, + 48: 0, + 3: 3, + 17: 0, + 25: 0, + 39: 0, + 62: 0, + 31: 1, + 54: 0, + 45: 0, + 9: 0, + 16: 0, + 2: 0, + 61: 0, + 15: 0, + 12: 0, + 42: 0, + 46: 0, + 18: 0, + 21: 0, + 4: 0, + 63: 0, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 1, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 43:{ + 5: 1, + 30: 0, + 24: 0, + 8: 0, + 26: 0, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 3, + 55: 0, + 43: 0, + 20: 0, + 19: 0, + 44: 0, + 14: 0, + 48: 0, + 3: 0, + 17: 0, + 25: 0, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 3, + 9: 0, + 16: 0, + 2: 1, + 61: 0, + 15: 0, + 12: 1, + 42: 0, + 46: 0, + 18: 1, + 21: 1, + 4: 0, + 63: 0, + 22: 3, + 10: 0, + 1: 3, + 36: 0, + 23: 1, + 13: 2, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 1, + 28: 1, + 41: 0, + 29: 1, + 33: 1, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 3, + 56: 0, + 59: 0, + 60: 0}, + 20:{ + 5: 2, + 30: 2, + 24: 2, + 8: 3, + 26: 2, + 52: 0, + 34: 1, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 
55: 0, + 43: 0, + 20: 1, + 19: 2, + 44: 1, + 14: 2, + 48: 0, + 3: 1, + 17: 1, + 25: 1, + 39: 1, + 62: 0, + 31: 1, + 54: 0, + 45: 1, + 9: 2, + 16: 3, + 2: 2, + 61: 0, + 15: 2, + 12: 2, + 42: 0, + 46: 0, + 18: 2, + 21: 2, + 4: 1, + 63: 0, + 22: 0, + 10: 3, + 1: 2, + 36: 2, + 23: 3, + 13: 3, + 40: 1, + 27: 2, + 32: 3, + 35: 2, + 11: 2, + 28: 2, + 41: 1, + 29: 2, + 33: 2, + 50: 2, + 37: 2, + 6: 1, + 7: 3, + 38: 1, + 56: 0, + 59: 0, + 60: 0}, + 19:{ + 5: 2, + 30: 1, + 24: 1, + 8: 0, + 26: 1, + 52: 0, + 34: 1, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 1, + 19: 1, + 44: 2, + 14: 1, + 48: 0, + 3: 2, + 17: 1, + 25: 1, + 39: 1, + 62: 0, + 31: 1, + 54: 0, + 45: 2, + 9: 1, + 16: 1, + 2: 3, + 61: 0, + 15: 2, + 12: 1, + 42: 0, + 46: 0, + 18: 3, + 21: 0, + 4: 3, + 63: 1, + 22: 2, + 10: 3, + 1: 3, + 36: 2, + 23: 3, + 13: 2, + 40: 1, + 27: 1, + 32: 3, + 35: 2, + 11: 1, + 28: 1, + 41: 1, + 29: 1, + 33: 1, + 50: 0, + 37: 2, + 6: 3, + 7: 3, + 38: 2, + 56: 0, + 59: 0, + 60: 0}, + 44:{ + 5: 1, + 30: 0, + 24: 1, + 8: 0, + 26: 1, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 0, + 19: 1, + 44: 0, + 14: 1, + 48: 0, + 3: 1, + 17: 2, + 25: 0, + 39: 0, + 62: 0, + 31: 1, + 54: 0, + 45: 0, + 9: 0, + 16: 0, + 2: 1, + 61: 0, + 15: 1, + 12: 1, + 42: 0, + 46: 0, + 18: 1, + 21: 0, + 4: 1, + 63: 0, + 22: 0, + 10: 2, + 1: 3, + 36: 0, + 23: 2, + 13: 1, + 40: 3, + 27: 2, + 32: 2, + 35: 3, + 11: 1, + 28: 1, + 41: 0, + 29: 1, + 33: 1, + 50: 0, + 37: 0, + 6: 2, + 7: 3, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 14:{ + 5: 1, + 30: 1, + 24: 3, + 8: 1, + 26: 1, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 2, + 19: 1, + 44: 0, + 14: 1, + 48: 3, + 3: 3, + 17: 2, + 25: 2, + 39: 1, + 62: 0, + 31: 2, + 54: 0, + 45: 0, + 9: 1, + 16: 3, + 2: 3, + 61: 1, + 15: 1, + 12: 2, + 42: 3, + 46: 1, + 18: 1, + 21: 0, + 4: 2, + 63: 0, + 22: 2, + 10: 3, + 1: 3, + 36: 3, + 23: 2, + 13: 3, + 40: 2, + 
27: 1, + 32: 3, + 35: 1, + 11: 0, + 28: 1, + 41: 0, + 29: 1, + 33: 0, + 50: 0, + 37: 1, + 6: 3, + 7: 3, + 38: 2, + 56: 0, + 59: 0, + 60: 0}, + 48:{ + 5: 0, + 30: 0, + 24: 0, + 8: 1, + 26: 0, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 0, + 19: 0, + 44: 0, + 14: 0, + 48: 0, + 3: 1, + 17: 0, + 25: 0, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 0, + 16: 0, + 2: 2, + 61: 0, + 15: 0, + 12: 0, + 42: 0, + 46: 0, + 18: 0, + 21: 0, + 4: 0, + 63: 0, + 22: 0, + 10: 0, + 1: 2, + 36: 0, + 23: 3, + 13: 3, + 40: 0, + 27: 0, + 32: 2, + 35: 0, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 3, + 56: 0, + 59: 0, + 60: 0}, + 3:{ + 5: 3, + 30: 2, + 24: 3, + 8: 1, + 26: 2, + 52: 0, + 34: 1, + 51: 1, + 47: 0, + 58: 0, + 57: 0, + 49: 1, + 53: 0, + 55: 0, + 43: 0, + 20: 3, + 19: 3, + 44: 2, + 14: 3, + 48: 3, + 3: 2, + 17: 2, + 25: 2, + 39: 2, + 62: 0, + 31: 2, + 54: 1, + 45: 1, + 9: 2, + 16: 2, + 2: 2, + 61: 1, + 15: 2, + 12: 3, + 42: 1, + 46: 0, + 18: 2, + 21: 2, + 4: 3, + 63: 1, + 22: 2, + 10: 3, + 1: 3, + 36: 3, + 23: 3, + 13: 3, + 40: 3, + 27: 3, + 32: 3, + 35: 2, + 11: 3, + 28: 2, + 41: 3, + 29: 3, + 33: 3, + 50: 2, + 37: 1, + 6: 3, + 7: 3, + 38: 2, + 56: 0, + 59: 0, + 60: 0}, + 17:{ + 5: 3, + 30: 2, + 24: 2, + 8: 1, + 26: 1, + 52: 1, + 34: 1, + 51: 1, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 1, + 19: 2, + 44: 1, + 14: 3, + 48: 0, + 3: 3, + 17: 3, + 25: 2, + 39: 2, + 62: 0, + 31: 1, + 54: 1, + 45: 1, + 9: 1, + 16: 0, + 2: 3, + 61: 0, + 15: 2, + 12: 3, + 42: 0, + 46: 0, + 18: 2, + 21: 2, + 4: 2, + 63: 1, + 22: 0, + 10: 3, + 1: 3, + 36: 2, + 23: 2, + 13: 2, + 40: 0, + 27: 2, + 32: 3, + 35: 2, + 11: 2, + 28: 2, + 41: 1, + 29: 2, + 33: 2, + 50: 0, + 37: 1, + 6: 2, + 7: 2, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 25:{ + 5: 2, + 30: 0, + 24: 1, + 8: 0, + 26: 1, + 52: 0, + 34: 0, + 51: 1, + 47: 0, + 58: 1, + 57: 3, + 49: 1, + 53: 0, + 55: 0, + 43: 0, + 20: 1, + 19: 1, + 
44: 1, + 14: 1, + 48: 0, + 3: 2, + 17: 0, + 25: 1, + 39: 1, + 62: 1, + 31: 1, + 54: 0, + 45: 0, + 9: 1, + 16: 0, + 2: 3, + 61: 0, + 15: 3, + 12: 1, + 42: 0, + 46: 1, + 18: 2, + 21: 1, + 4: 2, + 63: 0, + 22: 1, + 10: 3, + 1: 1, + 36: 0, + 23: 2, + 13: 3, + 40: 0, + 27: 0, + 32: 1, + 35: 0, + 11: 1, + 28: 2, + 41: 0, + 29: 1, + 33: 2, + 50: 0, + 37: 3, + 6: 1, + 7: 2, + 38: 1, + 56: 0, + 59: 0, + 60: 0}, + 39:{ + 5: 1, + 30: 0, + 24: 0, + 8: 1, + 26: 0, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 0, + 19: 0, + 44: 0, + 14: 0, + 48: 0, + 3: 2, + 17: 0, + 25: 0, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 1, + 16: 2, + 2: 0, + 61: 0, + 15: 3, + 12: 0, + 42: 0, + 46: 0, + 18: 1, + 21: 0, + 4: 0, + 63: 0, + 22: 1, + 10: 1, + 1: 0, + 36: 0, + 23: 2, + 13: 0, + 40: 0, + 27: 1, + 32: 0, + 35: 3, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 3, + 7: 1, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 62:{ + 5: 0, + 30: 0, + 24: 0, + 8: 0, + 26: 0, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 0, + 19: 0, + 44: 0, + 14: 0, + 48: 0, + 3: 1, + 17: 0, + 25: 0, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 0, + 16: 0, + 2: 1, + 61: 0, + 15: 0, + 12: 0, + 42: 0, + 46: 0, + 18: 0, + 21: 0, + 4: 0, + 63: 0, + 22: 0, + 10: 1, + 1: 0, + 36: 0, + 23: 0, + 13: 1, + 40: 2, + 27: 0, + 32: 0, + 35: 0, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 2, + 7: 1, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 31:{ + 5: 1, + 30: 1, + 24: 1, + 8: 1, + 26: 1, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 1, + 20: 1, + 19: 1, + 44: 0, + 14: 2, + 48: 1, + 3: 3, + 17: 2, + 25: 0, + 39: 1, + 62: 0, + 31: 1, + 54: 0, + 45: 0, + 9: 1, + 16: 2, + 2: 3, + 61: 2, + 15: 2, + 12: 2, + 42: 0, + 46: 0, + 18: 1, + 21: 1, + 4: 2, + 63: 1, + 22: 0, + 10: 3, + 1: 3, + 36: 0, + 23: 3, + 13: 2, + 40: 1, + 27: 3, + 32: 1, + 35: 2, + 11: 1, + 
28: 1, + 41: 0, + 29: 1, + 33: 1, + 50: 0, + 37: 1, + 6: 0, + 7: 1, + 38: 3, + 56: 0, + 59: 0, + 60: 0}, + 54:{ + 5: 0, + 30: 0, + 24: 0, + 8: 0, + 26: 0, + 52: 0, + 34: 1, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 0, + 19: 1, + 44: 0, + 14: 1, + 48: 0, + 3: 0, + 17: 0, + 25: 0, + 39: 0, + 62: 0, + 31: 0, + 54: 2, + 45: 0, + 9: 0, + 16: 0, + 2: 1, + 61: 0, + 15: 2, + 12: 0, + 42: 0, + 46: 0, + 18: 1, + 21: 0, + 4: 1, + 63: 0, + 22: 0, + 10: 2, + 1: 0, + 36: 0, + 23: 1, + 13: 1, + 40: 0, + 27: 1, + 32: 1, + 35: 0, + 11: 0, + 28: 1, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 0, + 7: 2, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 45:{ + 5: 0, + 30: 0, + 24: 1, + 8: 0, + 26: 0, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 0, + 19: 0, + 44: 0, + 14: 3, + 48: 0, + 3: 0, + 17: 0, + 25: 0, + 39: 0, + 62: 0, + 31: 1, + 54: 0, + 45: 0, + 9: 0, + 16: 0, + 2: 1, + 61: 0, + 15: 0, + 12: 0, + 42: 0, + 46: 0, + 18: 0, + 21: 0, + 4: 0, + 63: 0, + 22: 0, + 10: 3, + 1: 3, + 36: 0, + 23: 1, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 2, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 1, + 56: 0, + 59: 0, + 60: 0}, + 9:{ + 5: 2, + 30: 2, + 24: 2, + 8: 2, + 26: 2, + 52: 0, + 34: 1, + 51: 1, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 1, + 20: 2, + 19: 2, + 44: 1, + 14: 2, + 48: 1, + 3: 3, + 17: 2, + 25: 2, + 39: 1, + 62: 0, + 31: 3, + 54: 0, + 45: 1, + 9: 2, + 16: 1, + 2: 2, + 61: 2, + 15: 2, + 12: 2, + 42: 1, + 46: 1, + 18: 3, + 21: 3, + 4: 3, + 63: 0, + 22: 1, + 10: 3, + 1: 3, + 36: 0, + 23: 3, + 13: 3, + 40: 0, + 27: 3, + 32: 3, + 35: 3, + 11: 2, + 28: 2, + 41: 2, + 29: 2, + 33: 2, + 50: 1, + 37: 1, + 6: 3, + 7: 2, + 38: 1, + 56: 0, + 59: 0, + 60: 0}, + 16:{ + 5: 3, + 30: 1, + 24: 2, + 8: 3, + 26: 2, + 52: 0, + 34: 2, + 51: 0, + 47: 2, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 2, + 19: 2, + 44: 1, + 14: 2, + 48: 1, + 3: 3, + 
17: 3, + 25: 1, + 39: 1, + 62: 0, + 31: 1, + 54: 0, + 45: 1, + 9: 2, + 16: 0, + 2: 2, + 61: 0, + 15: 1, + 12: 3, + 42: 1, + 46: 0, + 18: 2, + 21: 1, + 4: 2, + 63: 0, + 22: 2, + 10: 3, + 1: 3, + 36: 0, + 23: 2, + 13: 3, + 40: 1, + 27: 2, + 32: 2, + 35: 3, + 11: 2, + 28: 1, + 41: 1, + 29: 2, + 33: 2, + 50: 2, + 37: 1, + 6: 3, + 7: 2, + 38: 3, + 56: 0, + 59: 0, + 60: 0}, + 2:{ + 5: 3, + 30: 2, + 24: 2, + 8: 3, + 26: 2, + 52: 0, + 34: 2, + 51: 1, + 47: 0, + 58: 0, + 57: 0, + 49: 3, + 53: 0, + 55: 0, + 43: 3, + 20: 2, + 19: 2, + 44: 3, + 14: 3, + 48: 1, + 3: 2, + 17: 2, + 25: 3, + 39: 2, + 62: 1, + 31: 2, + 54: 1, + 45: 1, + 9: 3, + 16: 2, + 2: 3, + 61: 0, + 15: 2, + 12: 3, + 42: 2, + 46: 2, + 18: 2, + 21: 2, + 4: 3, + 63: 1, + 22: 3, + 10: 3, + 1: 3, + 36: 0, + 23: 3, + 13: 3, + 40: 2, + 27: 3, + 32: 3, + 35: 3, + 11: 3, + 28: 3, + 41: 1, + 29: 2, + 33: 1, + 50: 0, + 37: 3, + 6: 3, + 7: 3, + 38: 3, + 56: 0, + 59: 0, + 60: 0}, + 61:{ + 5: 0, + 30: 0, + 24: 0, + 8: 0, + 26: 0, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 0, + 19: 2, + 44: 0, + 14: 2, + 48: 0, + 3: 0, + 17: 0, + 25: 0, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 1, + 16: 0, + 2: 0, + 61: 0, + 15: 0, + 12: 0, + 42: 0, + 46: 2, + 18: 0, + 21: 0, + 4: 0, + 63: 0, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 15:{ + 5: 2, + 30: 3, + 24: 1, + 8: 3, + 26: 1, + 52: 0, + 34: 1, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 2, + 19: 2, + 44: 1, + 14: 2, + 48: 0, + 3: 1, + 17: 2, + 25: 2, + 39: 1, + 62: 0, + 31: 0, + 54: 0, + 45: 1, + 9: 1, + 16: 3, + 2: 1, + 61: 0, + 15: 1, + 12: 1, + 42: 0, + 46: 0, + 18: 2, + 21: 1, + 4: 3, + 63: 2, + 22: 3, + 10: 3, + 1: 3, + 36: 2, + 23: 3, + 13: 3, + 40: 2, + 27: 3, + 32: 2, + 35: 3, + 11: 2, + 28: 1, + 41: 1, + 29: 2, + 33: 1, + 
50: 0, + 37: 2, + 6: 3, + 7: 3, + 38: 2, + 56: 0, + 59: 0, + 60: 0}, + 12:{ + 5: 3, + 30: 2, + 24: 1, + 8: 3, + 26: 2, + 52: 0, + 34: 1, + 51: 1, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 1, + 20: 2, + 19: 1, + 44: 1, + 14: 1, + 48: 0, + 3: 3, + 17: 2, + 25: 1, + 39: 1, + 62: 0, + 31: 1, + 54: 1, + 45: 0, + 9: 3, + 16: 3, + 2: 3, + 61: 0, + 15: 3, + 12: 1, + 42: 0, + 46: 0, + 18: 2, + 21: 2, + 4: 2, + 63: 0, + 22: 2, + 10: 3, + 1: 3, + 36: 0, + 23: 3, + 13: 2, + 40: 0, + 27: 0, + 32: 2, + 35: 0, + 11: 3, + 28: 2, + 41: 1, + 29: 1, + 33: 2, + 50: 1, + 37: 0, + 6: 3, + 7: 3, + 38: 1, + 56: 0, + 59: 0, + 60: 0}, + 42:{ + 5: 1, + 30: 0, + 24: 1, + 8: 0, + 26: 1, + 52: 0, + 34: 0, + 51: 0, + 47: 1, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 0, + 19: 1, + 44: 0, + 14: 1, + 48: 0, + 3: 2, + 17: 0, + 25: 0, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 0, + 16: 0, + 2: 2, + 61: 0, + 15: 0, + 12: 2, + 42: 1, + 46: 2, + 18: 1, + 21: 0, + 4: 0, + 63: 0, + 22: 0, + 10: 2, + 1: 3, + 36: 0, + 23: 2, + 13: 0, + 40: 3, + 27: 0, + 32: 0, + 35: 2, + 11: 0, + 28: 1, + 41: 0, + 29: 1, + 33: 1, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 1, + 56: 0, + 59: 0, + 60: 0}, + 46:{ + 5: 0, + 30: 0, + 24: 0, + 8: 0, + 26: 0, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 2, + 57: 1, + 49: 2, + 53: 0, + 55: 0, + 43: 3, + 20: 0, + 19: 1, + 44: 0, + 14: 1, + 48: 0, + 3: 0, + 17: 0, + 25: 0, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 1, + 9: 1, + 16: 2, + 2: 2, + 61: 0, + 15: 0, + 12: 0, + 42: 1, + 46: 0, + 18: 0, + 21: 0, + 4: 0, + 63: 0, + 22: 2, + 10: 2, + 1: 3, + 36: 0, + 23: 0, + 13: 1, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 1, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 2, + 56: 0, + 59: 0, + 60: 0}, + 18:{ + 5: 2, + 30: 0, + 24: 0, + 8: 2, + 26: 1, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 3, + 19: 3, + 44: 3, + 14: 0, + 48: 0, + 3: 3, + 17: 2, + 25: 1, + 39: 0, + 62: 0, + 
31: 0, + 54: 0, + 45: 2, + 9: 3, + 16: 1, + 2: 3, + 61: 0, + 15: 1, + 12: 2, + 42: 0, + 46: 0, + 18: 0, + 21: 2, + 4: 3, + 63: 0, + 22: 2, + 10: 3, + 1: 3, + 36: 3, + 23: 3, + 13: 3, + 40: 2, + 27: 3, + 32: 3, + 35: 3, + 11: 2, + 28: 0, + 41: 1, + 29: 0, + 33: 1, + 50: 0, + 37: 0, + 6: 3, + 7: 1, + 38: 2, + 56: 0, + 59: 0, + 60: 0}, + 21:{ + 5: 3, + 30: 0, + 24: 0, + 8: 1, + 26: 0, + 52: 0, + 34: 0, + 51: 0, + 47: 2, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 1, + 19: 3, + 44: 0, + 14: 0, + 48: 0, + 3: 3, + 17: 0, + 25: 1, + 39: 0, + 62: 0, + 31: 1, + 54: 0, + 45: 0, + 9: 3, + 16: 2, + 2: 3, + 61: 0, + 15: 3, + 12: 2, + 42: 0, + 46: 0, + 18: 0, + 21: 0, + 4: 3, + 63: 0, + 22: 1, + 10: 3, + 1: 3, + 36: 0, + 23: 1, + 13: 1, + 40: 0, + 27: 0, + 32: 1, + 35: 1, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 3, + 6: 3, + 7: 3, + 38: 2, + 56: 0, + 59: 0, + 60: 0}, + 4:{ + 5: 3, + 30: 1, + 24: 2, + 8: 3, + 26: 1, + 52: 0, + 34: 1, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 3, + 19: 2, + 44: 1, + 14: 2, + 48: 1, + 3: 3, + 17: 3, + 25: 1, + 39: 1, + 62: 0, + 31: 1, + 54: 1, + 45: 1, + 9: 3, + 16: 3, + 2: 3, + 61: 0, + 15: 2, + 12: 2, + 42: 1, + 46: 0, + 18: 2, + 21: 2, + 4: 3, + 63: 0, + 22: 2, + 10: 3, + 1: 3, + 36: 2, + 23: 2, + 13: 3, + 40: 0, + 27: 3, + 32: 3, + 35: 0, + 11: 3, + 28: 1, + 41: 1, + 29: 2, + 33: 2, + 50: 1, + 37: 1, + 6: 2, + 7: 2, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 63:{ + 5: 0, + 30: 0, + 24: 0, + 8: 0, + 26: 0, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 0, + 19: 0, + 44: 0, + 14: 0, + 48: 0, + 3: 0, + 17: 0, + 25: 0, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 0, + 16: 0, + 2: 0, + 61: 0, + 15: 2, + 12: 0, + 42: 0, + 46: 0, + 18: 0, + 21: 0, + 4: 0, + 63: 0, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 
38: 0, + 56: 0, + 59: 0, + 60: 0}, + 22:{ + 5: 3, + 30: 1, + 24: 2, + 8: 1, + 26: 2, + 52: 0, + 34: 3, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 3, + 19: 3, + 44: 1, + 14: 3, + 48: 1, + 3: 2, + 17: 3, + 25: 2, + 39: 1, + 62: 0, + 31: 2, + 54: 0, + 45: 1, + 9: 3, + 16: 2, + 2: 2, + 61: 0, + 15: 2, + 12: 2, + 42: 0, + 46: 0, + 18: 3, + 21: 3, + 4: 2, + 63: 1, + 22: 1, + 10: 0, + 1: 0, + 36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 3, + 28: 2, + 41: 1, + 29: 2, + 33: 2, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 10:{ + 5: 3, + 30: 0, + 24: 1, + 8: 3, + 26: 3, + 52: 0, + 34: 1, + 51: 0, + 47: 3, + 58: 0, + 57: 0, + 49: 2, + 53: 0, + 55: 3, + 43: 3, + 20: 3, + 19: 3, + 44: 0, + 14: 2, + 48: 0, + 3: 3, + 17: 3, + 25: 1, + 39: 0, + 62: 0, + 31: 2, + 54: 0, + 45: 0, + 9: 3, + 16: 3, + 2: 0, + 61: 0, + 15: 2, + 12: 3, + 42: 2, + 46: 0, + 18: 3, + 21: 0, + 4: 0, + 63: 0, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 3, + 7: 3, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 1:{ + 5: 3, + 30: 2, + 24: 3, + 8: 3, + 26: 3, + 52: 0, + 34: 3, + 51: 1, + 47: 2, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 3, + 20: 3, + 19: 3, + 44: 1, + 14: 3, + 48: 2, + 3: 3, + 17: 3, + 25: 2, + 39: 1, + 62: 1, + 31: 3, + 54: 1, + 45: 1, + 9: 3, + 16: 3, + 2: 3, + 61: 0, + 15: 3, + 12: 3, + 42: 2, + 46: 3, + 18: 3, + 21: 3, + 4: 2, + 63: 1, + 22: 3, + 10: 0, + 1: 0, + 36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 3, + 28: 2, + 41: 1, + 29: 2, + 33: 2, + 50: 1, + 37: 0, + 6: 0, + 7: 0, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 36:{ + 5: 2, + 30: 1, + 24: 3, + 8: 2, + 26: 1, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 1, + 53: 0, + 55: 0, + 43: 0, + 20: 1, + 19: 1, + 44: 1, + 14: 1, + 48: 0, + 3: 3, + 17: 1, + 25: 1, + 39: 1, + 62: 0, + 31: 1, + 54: 0, + 45: 1, + 9: 1, + 
16: 0, + 2: 2, + 61: 0, + 15: 2, + 12: 1, + 42: 0, + 46: 0, + 18: 1, + 21: 3, + 4: 1, + 63: 0, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 3, + 28: 2, + 41: 1, + 29: 2, + 33: 2, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 23:{ + 5: 3, + 30: 1, + 24: 2, + 8: 3, + 26: 3, + 52: 0, + 34: 3, + 51: 0, + 47: 2, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 3, + 19: 3, + 44: 1, + 14: 3, + 48: 3, + 3: 3, + 17: 3, + 25: 2, + 39: 2, + 62: 0, + 31: 3, + 54: 1, + 45: 2, + 9: 3, + 16: 2, + 2: 2, + 61: 0, + 15: 2, + 12: 3, + 42: 3, + 46: 2, + 18: 2, + 21: 3, + 4: 1, + 63: 1, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 3, + 28: 1, + 41: 1, + 29: 1, + 33: 0, + 50: 0, + 37: 0, + 6: 3, + 7: 2, + 38: 2, + 56: 0, + 59: 0, + 60: 0}, + 13:{ + 5: 3, + 30: 2, + 24: 2, + 8: 0, + 26: 1, + 52: 0, + 34: 1, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 2, + 19: 1, + 44: 0, + 14: 2, + 48: 0, + 3: 1, + 17: 2, + 25: 2, + 39: 1, + 62: 0, + 31: 2, + 54: 0, + 45: 0, + 9: 2, + 16: 3, + 2: 2, + 61: 0, + 15: 1, + 12: 2, + 42: 1, + 46: 0, + 18: 2, + 21: 1, + 4: 2, + 63: 0, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 2, + 28: 2, + 41: 1, + 29: 1, + 33: 1, + 50: 1, + 37: 0, + 6: 3, + 7: 3, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 40:{ + 5: 3, + 30: 0, + 24: 0, + 8: 3, + 26: 0, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 1, + 19: 0, + 44: 0, + 14: 0, + 48: 0, + 3: 0, + 17: 0, + 25: 0, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 1, + 16: 0, + 2: 0, + 61: 0, + 15: 0, + 12: 0, + 42: 0, + 46: 0, + 18: 0, + 21: 0, + 4: 0, + 63: 0, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 3, + 7: 3, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 
27:{ + 5: 0, + 30: 0, + 24: 0, + 8: 0, + 26: 0, + 52: 0, + 34: 1, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 1, + 19: 0, + 44: 0, + 14: 0, + 48: 0, + 3: 2, + 17: 3, + 25: 0, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 2, + 16: 0, + 2: 0, + 61: 0, + 15: 0, + 12: 0, + 42: 0, + 46: 0, + 18: 0, + 21: 0, + 4: 3, + 63: 0, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 3, + 7: 3, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 32:{ + 5: 3, + 30: 2, + 24: 3, + 8: 3, + 26: 0, + 52: 0, + 34: 0, + 51: 0, + 47: 2, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 1, + 43: 3, + 20: 3, + 19: 3, + 44: 1, + 14: 2, + 48: 1, + 3: 2, + 17: 2, + 25: 2, + 39: 2, + 62: 0, + 31: 1, + 54: 0, + 45: 1, + 9: 3, + 16: 1, + 2: 2, + 61: 0, + 15: 2, + 12: 1, + 42: 1, + 46: 2, + 18: 1, + 21: 1, + 4: 1, + 63: 0, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 1, + 28: 0, + 41: 1, + 29: 0, + 33: 1, + 50: 0, + 37: 0, + 6: 3, + 7: 2, + 38: 1, + 56: 0, + 59: 0, + 60: 0}, + 35:{ + 5: 3, + 30: 0, + 24: 0, + 8: 2, + 26: 1, + 52: 0, + 34: 0, + 51: 0, + 47: 2, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 1, + 20: 2, + 19: 2, + 44: 0, + 14: 1, + 48: 0, + 3: 2, + 17: 0, + 25: 3, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 2, + 16: 0, + 2: 1, + 61: 0, + 15: 3, + 12: 1, + 42: 0, + 46: 0, + 18: 0, + 21: 0, + 4: 0, + 63: 0, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 1, + 28: 1, + 41: 1, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 3, + 7: 3, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 11:{ + 5: 3, + 30: 3, + 24: 3, + 8: 2, + 26: 3, + 52: 3, + 34: 3, + 51: 2, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 1, + 20: 3, + 19: 3, + 44: 1, + 14: 3, + 48: 1, + 3: 3, + 17: 3, + 25: 3, + 39: 2, + 62: 1, + 31: 3, + 54: 1, + 45: 3, + 9: 3, + 16: 2, + 2: 3, + 61: 0, + 15: 3, + 
12: 3, + 42: 2, + 46: 0, + 18: 3, + 21: 3, + 4: 3, + 63: 0, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 28:{ + 5: 3, + 30: 2, + 24: 2, + 8: 1, + 26: 2, + 52: 0, + 34: 1, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 2, + 19: 3, + 44: 2, + 14: 3, + 48: 0, + 3: 3, + 17: 3, + 25: 2, + 39: 3, + 62: 0, + 31: 2, + 54: 2, + 45: 0, + 9: 2, + 16: 2, + 2: 2, + 61: 0, + 15: 3, + 12: 2, + 42: 0, + 46: 0, + 18: 3, + 21: 3, + 4: 1, + 63: 0, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 41:{ + 5: 2, + 30: 1, + 24: 2, + 8: 0, + 26: 1, + 52: 1, + 34: 1, + 51: 1, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 3, + 19: 2, + 44: 0, + 14: 2, + 48: 0, + 3: 3, + 17: 1, + 25: 3, + 39: 0, + 62: 0, + 31: 1, + 54: 1, + 45: 1, + 9: 1, + 16: 2, + 2: 2, + 61: 0, + 15: 3, + 12: 0, + 42: 1, + 46: 0, + 18: 2, + 21: 0, + 4: 2, + 63: 0, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 29:{ + 5: 2, + 30: 0, + 24: 1, + 8: 0, + 26: 3, + 52: 0, + 34: 3, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 3, + 19: 1, + 44: 0, + 14: 0, + 48: 0, + 3: 3, + 17: 2, + 25: 0, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 0, + 16: 1, + 2: 0, + 61: 0, + 15: 0, + 12: 0, + 42: 0, + 46: 0, + 18: 3, + 21: 3, + 4: 0, + 63: 0, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 33:{ + 5: 1, + 30: 2, + 24: 0, + 
8: 0, + 26: 0, + 52: 0, + 34: 1, + 51: 1, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 3, + 19: 1, + 44: 0, + 14: 3, + 48: 0, + 3: 0, + 17: 1, + 25: 3, + 39: 0, + 62: 0, + 31: 0, + 54: 2, + 45: 0, + 9: 3, + 16: 0, + 2: 3, + 61: 0, + 15: 1, + 12: 3, + 42: 0, + 46: 0, + 18: 1, + 21: 2, + 4: 0, + 63: 0, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 50:{ + 5: 0, + 30: 0, + 24: 0, + 8: 0, + 26: 0, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 0, + 19: 0, + 44: 0, + 14: 0, + 48: 0, + 3: 0, + 17: 0, + 25: 0, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 0, + 16: 0, + 2: 0, + 61: 0, + 15: 0, + 12: 0, + 42: 0, + 46: 0, + 18: 0, + 21: 0, + 4: 0, + 63: 0, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 37:{ + 5: 2, + 30: 1, + 24: 2, + 8: 2, + 26: 3, + 52: 0, + 34: 0, + 51: 0, + 47: 1, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 1, + 19: 2, + 44: 0, + 14: 1, + 48: 0, + 3: 3, + 17: 3, + 25: 0, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 2, + 16: 1, + 2: 0, + 61: 0, + 15: 0, + 12: 2, + 42: 0, + 46: 0, + 18: 1, + 21: 0, + 4: 1, + 63: 0, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 1, + 28: 0, + 41: 0, + 29: 0, + 33: 1, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 6:{ + 5: 2, + 30: 1, + 24: 2, + 8: 3, + 26: 2, + 52: 0, + 34: 1, + 51: 1, + 47: 0, + 58: 0, + 57: 0, + 49: 1, + 53: 0, + 55: 0, + 43: 0, + 20: 1, + 19: 2, + 44: 1, + 14: 2, + 48: 1, + 3: 3, + 17: 1, + 25: 2, + 39: 2, + 62: 1, + 31: 1, + 54: 0, + 45: 0, + 9: 3, + 16: 3, + 2: 2, + 61: 0, + 15: 2, + 12: 3, + 42: 0, + 46: 0, + 18: 2, + 
21: 1, + 4: 3, + 63: 0, + 22: 1, + 10: 0, + 1: 3, + 36: 2, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 3, + 28: 2, + 41: 1, + 29: 2, + 33: 2, + 50: 1, + 37: 0, + 6: 0, + 7: 0, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 7:{ + 5: 2, + 30: 1, + 24: 2, + 8: 3, + 26: 2, + 52: 0, + 34: 1, + 51: 1, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 1, + 19: 2, + 44: 1, + 14: 2, + 48: 0, + 3: 3, + 17: 2, + 25: 2, + 39: 2, + 62: 0, + 31: 1, + 54: 1, + 45: 0, + 9: 3, + 16: 2, + 2: 2, + 61: 0, + 15: 1, + 12: 3, + 42: 1, + 46: 0, + 18: 2, + 21: 2, + 4: 3, + 63: 0, + 22: 0, + 10: 0, + 1: 3, + 36: 2, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 2, + 28: 2, + 41: 1, + 29: 2, + 33: 2, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 38:{ + 5: 2, + 30: 1, + 24: 1, + 8: 0, + 26: 1, + 52: 0, + 34: 1, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 2, + 19: 1, + 44: 1, + 14: 1, + 48: 0, + 3: 1, + 17: 1, + 25: 1, + 39: 0, + 62: 0, + 31: 1, + 54: 1, + 45: 0, + 9: 2, + 16: 0, + 2: 1, + 61: 1, + 15: 1, + 12: 1, + 42: 0, + 46: 0, + 18: 1, + 21: 1, + 4: 2, + 63: 1, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 2, + 28: 2, + 41: 1, + 29: 1, + 33: 1, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 0, + 56: 0, + 59: 0, + 60: 0}, + 56:{ + 5: 0, + 30: 0, + 24: 0, + 8: 0, + 26: 0, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 0, + 19: 0, + 44: 0, + 14: 0, + 48: 0, + 3: 0, + 17: 0, + 25: 0, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 0, + 16: 0, + 2: 0, + 61: 0, + 15: 0, + 12: 0, + 42: 0, + 46: 0, + 18: 0, + 21: 0, + 4: 0, + 63: 0, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 0, + 56: 2, + 59: 1, + 60: 1}, + 59:{ + 5: 0, + 30: 0, + 24: 0, + 8: 0, + 26: 0, + 52: 0, + 34: 0, + 51: 
0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 0, + 19: 0, + 44: 0, + 14: 0, + 48: 0, + 3: 0, + 17: 0, + 25: 0, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 0, + 16: 0, + 2: 0, + 61: 0, + 15: 0, + 12: 0, + 42: 0, + 46: 0, + 18: 0, + 21: 0, + 4: 0, + 63: 0, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 0, + 56: 1, + 59: 1, + 60: 3}, + 60:{ + 5: 0, + 30: 0, + 24: 0, + 8: 0, + 26: 0, + 52: 0, + 34: 0, + 51: 0, + 47: 0, + 58: 0, + 57: 0, + 49: 0, + 53: 0, + 55: 0, + 43: 0, + 20: 0, + 19: 0, + 44: 0, + 14: 0, + 48: 0, + 3: 0, + 17: 0, + 25: 0, + 39: 0, + 62: 0, + 31: 0, + 54: 0, + 45: 0, + 9: 0, + 16: 0, + 2: 0, + 61: 0, + 15: 0, + 12: 0, + 42: 0, + 46: 0, + 18: 0, + 21: 0, + 4: 0, + 63: 0, + 22: 0, + 10: 0, + 1: 0, + 36: 0, + 23: 0, + 13: 0, + 40: 0, + 27: 0, + 32: 0, + 35: 0, + 11: 0, + 28: 0, + 41: 0, + 29: 0, + 33: 0, + 50: 0, + 37: 0, + 6: 0, + 7: 0, + 38: 0, + 56: 2, + 59: 1, + 60: 0}} +TIS_620_THAI_CHAR_TO_ORDER = { + 0: 255, + 1: 255, + 2: 255, + 3: 255, + 4: 255, + 5: 255, + 6: 255, + 7: 255, + 8: 255, + 9: 255, + 10: 254, + 11: 255, + 12: 255, + 13: 254, + 14: 255, + 15: 255, + 16: 255, + 17: 255, + 18: 255, + 19: 255, + 20: 255, + 21: 255, + 22: 255, + 23: 255, + 24: 255, + 25: 255, + 26: 255, + 27: 255, + 28: 255, + 29: 255, + 30: 255, + 31: 255, + 32: 253, + 33: 253, + 34: 253, + 35: 253, + 36: 253, + 37: 253, + 38: 253, + 39: 253, + 40: 253, + 41: 253, + 42: 253, + 43: 253, + 44: 253, + 45: 253, + 46: 253, + 47: 253, + 48: 252, + 49: 252, + 50: 252, + 51: 252, + 52: 252, + 53: 252, + 54: 252, + 55: 252, + 56: 252, + 57: 252, + 58: 253, + 59: 253, + 60: 253, + 61: 253, + 62: 253, + 63: 253, + 64: 253, + 65: 182, + 66: 106, + 67: 107, + 68: 100, + 69: 183, + 70: 184, + 71: 185, + 72: 101, + 73: 94, + 74: 186, + 75: 187, + 76: 108, + 77: 109, + 78: 110, + 79: 111, + 80: 188, + 81: 189, + 82: 190, + 83: 
89, + 84: 95, + 85: 112, + 86: 113, + 87: 191, + 88: 192, + 89: 193, + 90: 194, + 91: 253, + 92: 253, + 93: 253, + 94: 253, + 95: 253, + 96: 253, + 97: 64, + 98: 72, + 99: 73, + 100: 114, + 101: 74, + 102: 115, + 103: 116, + 104: 102, + 105: 81, + 106: 201, + 107: 117, + 108: 90, + 109: 103, + 110: 78, + 111: 82, + 112: 96, + 113: 202, + 114: 91, + 115: 79, + 116: 84, + 117: 104, + 118: 105, + 119: 97, + 120: 98, + 121: 92, + 122: 203, + 123: 253, + 124: 253, + 125: 253, + 126: 253, + 127: 253, + 128: 209, + 129: 210, + 130: 211, + 131: 212, + 132: 213, + 133: 88, + 134: 214, + 135: 215, + 136: 216, + 137: 217, + 138: 218, + 139: 219, + 140: 220, + 141: 118, + 142: 221, + 143: 222, + 144: 223, + 145: 224, + 146: 99, + 147: 85, + 148: 83, + 149: 225, + 150: 226, + 151: 227, + 152: 228, + 153: 229, + 154: 230, + 155: 231, + 156: 232, + 157: 233, + 158: 234, + 159: 235, + 160: 236, + 161: 5, + 162: 30, + 163: 237, + 164: 24, + 165: 238, + 166: 75, + 167: 8, + 168: 26, + 169: 52, + 170: 34, + 171: 51, + 172: 119, + 173: 47, + 174: 58, + 175: 57, + 176: 49, + 177: 53, + 178: 55, + 179: 43, + 180: 20, + 181: 19, + 182: 44, + 183: 14, + 184: 48, + 185: 3, + 186: 17, + 187: 25, + 188: 39, + 189: 62, + 190: 31, + 191: 54, + 192: 45, + 193: 9, + 194: 16, + 195: 2, + 196: 61, + 197: 15, + 198: 239, + 199: 12, + 200: 42, + 201: 46, + 202: 18, + 203: 21, + 204: 76, + 205: 4, + 206: 66, + 207: 63, + 208: 22, + 209: 10, + 210: 1, + 211: 36, + 212: 23, + 213: 13, + 214: 40, + 215: 27, + 216: 32, + 217: 35, + 218: 86, + 219: 240, + 220: 241, + 221: 242, + 222: 243, + 223: 244, + 224: 11, + 225: 28, + 226: 41, + 227: 29, + 228: 33, + 229: 245, + 230: 50, + 231: 37, + 232: 6, + 233: 7, + 234: 67, + 235: 77, + 236: 38, + 237: 93, + 238: 246, + 239: 247, + 240: 68, + 241: 56, + 242: 59, + 243: 65, + 244: 69, + 245: 60, + 246: 70, + 247: 80, + 248: 71, + 249: 87, + 250: 248, + 251: 249, + 252: 250, + 253: 251, + 254: 252, + 255: 253} +TIS_620_THAI_MODEL = 
SingleByteCharSetModel(charset_name="TIS-620", language="Thai", + char_to_order_map=TIS_620_THAI_CHAR_TO_ORDER, + language_model=THAI_LANG_MODEL, + typical_positive_ratio=0.926386, + keep_ascii_letters=False, + alphabet="กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛") diff --git a/APPS_UNCOMPILED/lib/chardet/langturkishmodel.py b/APPS_UNCOMPILED/lib/chardet/langturkishmodel.py new file mode 100644 index 0000000..9670969 --- /dev/null +++ b/APPS_UNCOMPILED/lib/chardet/langturkishmodel.py @@ -0,0 +1,4303 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/langturkishmodel.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 95934 bytes +from chardet.sbcharsetprober import SingleByteCharSetModel +TURKISH_LANG_MODEL = {23:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 0, + 49: 0, + 20: 0, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 0, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 0, + 1: 3, + 21: 0, + 28: 0, + 12: 2, + 2: 3, + 18: 0, + 27: 1, + 25: 1, + 3: 1, + 24: 0, + 10: 2, + 5: 1, + 13: 1, + 4: 1, + 15: 0, + 26: 0, + 7: 1, + 8: 1, + 9: 1, + 14: 1, + 32: 0, + 57: 0, + 58: 0, + 11: 3, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 1, + 61: 0, + 34: 0, + 17: 0, + 30: 0, + 41: 0, + 6: 0, + 40: 0, + 19: 0}, + 37:{ + 23: 0, + 37: 0, + 47: 2, + 39: 0, + 29: 0, + 52: 2, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 1, + 49: 0, + 20: 0, + 46: 0, + 42: 0, + 48: 1, + 44: 0, + 35: 1, + 31: 0, + 51: 0, + 38: 1, + 62: 0, + 43: 1, + 56: 0, + 1: 2, + 21: 0, + 28: 2, + 12: 0, + 2: 3, + 18: 0, + 27: 0, + 25: 0, + 3: 0, + 24: 0, + 10: 0, + 5: 0, + 13: 1, + 4: 1, + 15: 0, + 26: 0, + 7: 0, + 8: 0, + 9: 0, + 14: 2, + 32: 0, + 57: 0, + 58: 0, + 11: 0, + 22: 1, + 63: 0, + 54: 
0, + 50: 1, + 55: 0, + 59: 0, + 33: 0, + 61: 0, + 34: 1, + 17: 0, + 30: 0, + 41: 0, + 6: 0, + 40: 1, + 19: 1}, + 47:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 1, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 0, + 49: 1, + 20: 0, + 46: 1, + 42: 0, + 48: 1, + 44: 1, + 35: 0, + 31: 0, + 51: 0, + 38: 1, + 62: 0, + 43: 1, + 56: 0, + 1: 3, + 21: 0, + 28: 2, + 12: 0, + 2: 3, + 18: 0, + 27: 0, + 25: 0, + 3: 0, + 24: 2, + 10: 1, + 5: 2, + 13: 2, + 4: 2, + 15: 1, + 26: 0, + 7: 2, + 8: 0, + 9: 0, + 14: 3, + 32: 0, + 57: 0, + 58: 0, + 11: 0, + 22: 2, + 63: 0, + 54: 0, + 50: 1, + 55: 0, + 59: 0, + 33: 1, + 61: 0, + 34: 1, + 17: 0, + 30: 0, + 41: 1, + 6: 3, + 40: 0, + 19: 0}, + 39:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 1, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 1, + 49: 0, + 20: 0, + 46: 0, + 42: 0, + 48: 1, + 44: 0, + 35: 0, + 31: 0, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 0, + 1: 2, + 21: 0, + 28: 2, + 12: 0, + 2: 2, + 18: 0, + 27: 0, + 25: 0, + 3: 0, + 24: 0, + 10: 0, + 5: 1, + 13: 3, + 4: 0, + 15: 1, + 26: 0, + 7: 0, + 8: 0, + 9: 0, + 14: 1, + 32: 0, + 57: 0, + 58: 0, + 11: 0, + 22: 1, + 63: 0, + 54: 1, + 50: 0, + 55: 0, + 59: 0, + 33: 1, + 61: 0, + 34: 0, + 17: 0, + 30: 1, + 41: 0, + 6: 1, + 40: 1, + 19: 0}, + 29:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 1, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 3, + 49: 0, + 20: 1, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 0, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 0, + 1: 3, + 21: 0, + 28: 0, + 12: 2, + 2: 3, + 18: 0, + 27: 1, + 25: 0, + 3: 1, + 24: 1, + 10: 0, + 5: 3, + 13: 3, + 4: 3, + 15: 0, + 26: 0, + 7: 0, + 8: 1, + 9: 1, + 14: 1, + 32: 1, + 57: 0, + 58: 0, + 11: 2, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 0, + 61: 0, + 34: 0, + 17: 0, + 30: 0, + 41: 0, + 6: 3, + 40: 0, + 19: 0}, + 52:{ + 23: 0, + 37: 1, + 47: 1, + 39: 1, + 29: 1, + 52: 2, + 36: 0, + 45: 2, + 53: 1, + 60: 0, + 16: 0, + 49: 0, + 20: 1, + 46: 1, + 42: 1, + 48: 2, + 44: 1, + 35: 1, + 31: 1, + 51: 1, + 
38: 1, + 62: 0, + 43: 2, + 56: 0, + 1: 0, + 21: 1, + 28: 1, + 12: 1, + 2: 0, + 18: 1, + 27: 0, + 25: 0, + 3: 2, + 24: 1, + 10: 0, + 5: 0, + 13: 1, + 4: 2, + 15: 1, + 26: 0, + 7: 2, + 8: 1, + 9: 1, + 14: 1, + 32: 0, + 57: 0, + 58: 0, + 11: 1, + 22: 1, + 63: 0, + 54: 0, + 50: 1, + 55: 2, + 59: 0, + 33: 0, + 61: 0, + 34: 2, + 17: 0, + 30: 1, + 41: 1, + 6: 2, + 40: 0, + 19: 2}, + 36:{ + 23: 1, + 37: 0, + 47: 1, + 39: 0, + 29: 0, + 52: 1, + 36: 2, + 45: 0, + 53: 0, + 60: 0, + 16: 2, + 49: 0, + 20: 0, + 46: 2, + 42: 1, + 48: 1, + 44: 1, + 35: 1, + 31: 0, + 51: 1, + 38: 2, + 62: 0, + 43: 0, + 56: 0, + 1: 3, + 21: 0, + 28: 1, + 12: 0, + 2: 3, + 18: 0, + 27: 0, + 25: 0, + 3: 0, + 24: 1, + 10: 1, + 5: 0, + 13: 3, + 4: 2, + 15: 0, + 26: 1, + 7: 0, + 8: 1, + 9: 1, + 14: 3, + 32: 0, + 57: 0, + 58: 1, + 11: 0, + 22: 2, + 63: 0, + 54: 1, + 50: 2, + 55: 0, + 59: 1, + 33: 2, + 61: 0, + 34: 0, + 17: 0, + 30: 1, + 41: 1, + 6: 2, + 40: 2, + 19: 1}, + 45:{ + 23: 0, + 37: 1, + 47: 0, + 39: 0, + 29: 0, + 52: 2, + 36: 2, + 45: 1, + 53: 1, + 60: 0, + 16: 2, + 49: 1, + 20: 0, + 46: 1, + 42: 1, + 48: 1, + 44: 0, + 35: 2, + 31: 0, + 51: 1, + 38: 2, + 62: 0, + 43: 0, + 56: 0, + 1: 3, + 21: 0, + 28: 2, + 12: 0, + 2: 3, + 18: 0, + 27: 0, + 25: 0, + 3: 2, + 24: 0, + 10: 1, + 5: 0, + 13: 2, + 4: 0, + 15: 1, + 26: 1, + 7: 1, + 8: 0, + 9: 0, + 14: 3, + 32: 0, + 57: 0, + 58: 0, + 11: 0, + 22: 2, + 63: 0, + 54: 1, + 50: 1, + 55: 0, + 59: 0, + 33: 1, + 61: 0, + 34: 1, + 17: 0, + 30: 2, + 41: 1, + 6: 0, + 40: 2, + 19: 1}, + 53:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 1, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 2, + 49: 0, + 20: 0, + 46: 0, + 42: 0, + 48: 1, + 44: 0, + 35: 0, + 31: 0, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 0, + 1: 2, + 21: 0, + 28: 2, + 12: 0, + 2: 2, + 18: 0, + 27: 0, + 25: 0, + 3: 0, + 24: 0, + 10: 0, + 5: 2, + 13: 2, + 4: 0, + 15: 0, + 26: 0, + 7: 0, + 8: 0, + 9: 0, + 14: 2, + 32: 0, + 57: 0, + 58: 0, + 11: 0, + 22: 2, + 63: 0, + 54: 1, + 50: 0, + 55: 0, + 59: 0, + 33: 
2, + 61: 0, + 34: 1, + 17: 0, + 30: 0, + 41: 0, + 6: 0, + 40: 1, + 19: 1}, + 60:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 0, + 49: 0, + 20: 1, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 0, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 0, + 1: 0, + 21: 1, + 28: 0, + 12: 1, + 2: 0, + 18: 0, + 27: 0, + 25: 0, + 3: 1, + 24: 0, + 10: 0, + 5: 0, + 13: 0, + 4: 1, + 15: 0, + 26: 0, + 7: 0, + 8: 1, + 9: 0, + 14: 0, + 32: 0, + 57: 0, + 58: 0, + 11: 0, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 0, + 61: 0, + 34: 0, + 17: 0, + 30: 0, + 41: 0, + 6: 0, + 40: 0, + 19: 0}, + 16:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 3, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 0, + 49: 0, + 20: 2, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 2, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 0, + 1: 2, + 21: 3, + 28: 0, + 12: 3, + 2: 1, + 18: 3, + 27: 3, + 25: 3, + 3: 3, + 24: 2, + 10: 3, + 5: 0, + 13: 0, + 4: 3, + 15: 0, + 26: 1, + 7: 3, + 8: 3, + 9: 3, + 14: 0, + 32: 3, + 57: 0, + 58: 0, + 11: 2, + 22: 1, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 0, + 61: 0, + 34: 0, + 17: 2, + 30: 0, + 41: 1, + 6: 3, + 40: 0, + 19: 0}, + 49:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 2, + 52: 0, + 36: 1, + 45: 1, + 53: 0, + 60: 0, + 16: 0, + 49: 0, + 20: 1, + 46: 0, + 42: 2, + 48: 0, + 44: 0, + 35: 0, + 31: 0, + 51: 0, + 38: 0, + 62: 0, + 43: 1, + 56: 0, + 1: 0, + 21: 3, + 28: 0, + 12: 2, + 2: 0, + 18: 0, + 27: 0, + 25: 0, + 3: 2, + 24: 0, + 10: 1, + 5: 0, + 13: 0, + 4: 2, + 15: 1, + 26: 1, + 7: 1, + 8: 1, + 9: 1, + 14: 0, + 32: 0, + 57: 0, + 58: 0, + 11: 2, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 2, + 59: 0, + 33: 0, + 61: 0, + 34: 1, + 17: 1, + 30: 1, + 41: 0, + 6: 2, + 40: 0, + 19: 0}, + 20:{ + 23: 1, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 1, + 16: 3, + 49: 0, + 20: 2, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 1, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 0, + 
1: 3, + 21: 2, + 28: 0, + 12: 3, + 2: 3, + 18: 0, + 27: 1, + 25: 1, + 3: 2, + 24: 2, + 10: 2, + 5: 2, + 13: 3, + 4: 3, + 15: 0, + 26: 1, + 7: 3, + 8: 0, + 9: 2, + 14: 3, + 32: 0, + 57: 0, + 58: 0, + 11: 2, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 3, + 61: 0, + 34: 0, + 17: 0, + 30: 0, + 41: 0, + 6: 3, + 40: 0, + 19: 0}, + 46:{ + 23: 0, + 37: 1, + 47: 0, + 39: 0, + 29: 0, + 52: 1, + 36: 1, + 45: 1, + 53: 0, + 60: 0, + 16: 2, + 49: 0, + 20: 0, + 46: 1, + 42: 0, + 48: 0, + 44: 1, + 35: 1, + 31: 0, + 51: 1, + 38: 2, + 62: 0, + 43: 1, + 56: 0, + 1: 3, + 21: 0, + 28: 2, + 12: 0, + 2: 3, + 18: 0, + 27: 1, + 25: 0, + 3: 0, + 24: 2, + 10: 1, + 5: 1, + 13: 3, + 4: 2, + 15: 1, + 26: 1, + 7: 1, + 8: 0, + 9: 0, + 14: 3, + 32: 0, + 57: 0, + 58: 1, + 11: 1, + 22: 2, + 63: 0, + 54: 1, + 50: 1, + 55: 0, + 59: 0, + 33: 0, + 61: 0, + 34: 1, + 17: 0, + 30: 0, + 41: 1, + 6: 2, + 40: 1, + 19: 1}, + 42:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 1, + 36: 0, + 45: 1, + 53: 0, + 60: 0, + 16: 2, + 49: 1, + 20: 0, + 46: 0, + 42: 0, + 48: 2, + 44: 1, + 35: 1, + 31: 0, + 51: 1, + 38: 1, + 62: 0, + 43: 0, + 56: 0, + 1: 3, + 21: 0, + 28: 2, + 12: 0, + 2: 2, + 18: 0, + 27: 0, + 25: 0, + 3: 0, + 24: 0, + 10: 0, + 5: 3, + 13: 3, + 4: 0, + 15: 1, + 26: 0, + 7: 0, + 8: 0, + 9: 0, + 14: 2, + 32: 0, + 57: 0, + 58: 0, + 11: 0, + 22: 2, + 63: 0, + 54: 2, + 50: 1, + 55: 0, + 59: 0, + 33: 2, + 61: 0, + 34: 1, + 17: 0, + 30: 1, + 41: 2, + 6: 1, + 40: 1, + 19: 1}, + 48:{ + 23: 0, + 37: 0, + 47: 2, + 39: 0, + 29: 0, + 52: 2, + 36: 1, + 45: 1, + 53: 0, + 60: 0, + 16: 2, + 49: 0, + 20: 0, + 46: 1, + 42: 1, + 48: 1, + 44: 0, + 35: 1, + 31: 0, + 51: 0, + 38: 1, + 62: 0, + 43: 0, + 56: 0, + 1: 2, + 21: 0, + 28: 2, + 12: 0, + 2: 3, + 18: 0, + 27: 0, + 25: 0, + 3: 0, + 24: 0, + 10: 1, + 5: 0, + 13: 2, + 4: 0, + 15: 2, + 26: 0, + 7: 0, + 8: 0, + 9: 0, + 14: 2, + 32: 0, + 57: 0, + 58: 2, + 11: 0, + 22: 2, + 63: 0, + 54: 1, + 50: 2, + 55: 0, + 59: 0, + 33: 0, + 61: 0, + 34: 2, + 17: 0, + 30: 
1, + 41: 1, + 6: 0, + 40: 2, + 19: 1}, + 44:{ + 23: 0, + 37: 0, + 47: 1, + 39: 0, + 29: 0, + 52: 1, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 3, + 49: 0, + 20: 0, + 46: 0, + 42: 0, + 48: 1, + 44: 0, + 35: 0, + 31: 0, + 51: 0, + 38: 0, + 62: 0, + 43: 1, + 56: 0, + 1: 3, + 21: 1, + 28: 1, + 12: 0, + 2: 2, + 18: 0, + 27: 0, + 25: 0, + 3: 0, + 24: 0, + 10: 1, + 5: 2, + 13: 2, + 4: 0, + 15: 1, + 26: 0, + 7: 0, + 8: 0, + 9: 0, + 14: 2, + 32: 0, + 57: 0, + 58: 0, + 11: 1, + 22: 2, + 63: 0, + 54: 0, + 50: 1, + 55: 0, + 59: 0, + 33: 1, + 61: 0, + 34: 1, + 17: 1, + 30: 1, + 41: 0, + 6: 2, + 40: 1, + 19: 1}, + 35:{ + 23: 0, + 37: 0, + 47: 1, + 39: 0, + 29: 0, + 52: 1, + 36: 1, + 45: 1, + 53: 0, + 60: 0, + 16: 3, + 49: 1, + 20: 1, + 46: 0, + 42: 0, + 48: 1, + 44: 0, + 35: 0, + 31: 0, + 51: 1, + 38: 1, + 62: 0, + 43: 1, + 56: 0, + 1: 3, + 21: 0, + 28: 2, + 12: 0, + 2: 3, + 18: 0, + 27: 0, + 25: 0, + 3: 0, + 24: 0, + 10: 1, + 5: 1, + 13: 2, + 4: 1, + 15: 0, + 26: 0, + 7: 0, + 8: 0, + 9: 1, + 14: 2, + 32: 0, + 57: 0, + 58: 0, + 11: 0, + 22: 1, + 63: 0, + 54: 2, + 50: 2, + 55: 0, + 59: 0, + 33: 3, + 61: 0, + 34: 1, + 17: 0, + 30: 0, + 41: 0, + 6: 3, + 40: 2, + 19: 1}, + 31:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 1, + 16: 2, + 49: 0, + 20: 1, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 2, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 0, + 1: 3, + 21: 2, + 28: 0, + 12: 1, + 2: 3, + 18: 2, + 27: 2, + 25: 0, + 3: 1, + 24: 1, + 10: 2, + 5: 2, + 13: 3, + 4: 3, + 15: 0, + 26: 2, + 7: 2, + 8: 0, + 9: 2, + 14: 2, + 32: 1, + 57: 1, + 58: 1, + 11: 2, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 0, + 61: 0, + 34: 0, + 17: 1, + 30: 0, + 41: 0, + 6: 3, + 40: 0, + 19: 0}, + 51:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 1, + 36: 1, + 45: 0, + 53: 0, + 60: 0, + 16: 1, + 49: 0, + 20: 0, + 46: 1, + 42: 0, + 48: 1, + 44: 0, + 35: 0, + 31: 0, + 51: 1, + 38: 1, + 62: 0, + 43: 0, + 56: 0, + 1: 3, + 21: 0, + 28: 1, + 12: 0, + 2: 
3, + 18: 0, + 27: 2, + 25: 0, + 3: 0, + 24: 0, + 10: 1, + 5: 1, + 13: 3, + 4: 2, + 15: 0, + 26: 1, + 7: 0, + 8: 0, + 9: 0, + 14: 2, + 32: 0, + 57: 0, + 58: 0, + 11: 0, + 22: 2, + 63: 0, + 54: 1, + 50: 1, + 55: 0, + 59: 0, + 33: 0, + 61: 0, + 34: 0, + 17: 0, + 30: 1, + 41: 1, + 6: 2, + 40: 0, + 19: 1}, + 38:{ + 23: 1, + 37: 1, + 47: 1, + 39: 0, + 29: 0, + 52: 2, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 3, + 49: 0, + 20: 3, + 46: 0, + 42: 0, + 48: 1, + 44: 1, + 35: 0, + 31: 0, + 51: 1, + 38: 1, + 62: 0, + 43: 0, + 56: 0, + 1: 3, + 21: 0, + 28: 2, + 12: 0, + 2: 3, + 18: 0, + 27: 0, + 25: 0, + 3: 0, + 24: 0, + 10: 0, + 5: 2, + 13: 2, + 4: 0, + 15: 2, + 26: 0, + 7: 0, + 8: 0, + 9: 1, + 14: 3, + 32: 0, + 57: 0, + 58: 0, + 11: 1, + 22: 2, + 63: 0, + 54: 1, + 50: 1, + 55: 0, + 59: 1, + 33: 2, + 61: 0, + 34: 1, + 17: 0, + 30: 1, + 41: 1, + 6: 3, + 40: 2, + 19: 1}, + 62:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 0, + 49: 0, + 20: 0, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 0, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 0, + 1: 0, + 21: 0, + 28: 0, + 12: 0, + 2: 0, + 18: 0, + 27: 0, + 25: 0, + 3: 0, + 24: 0, + 10: 0, + 5: 0, + 13: 0, + 4: 0, + 15: 0, + 26: 0, + 7: 0, + 8: 0, + 9: 0, + 14: 0, + 32: 0, + 57: 0, + 58: 0, + 11: 0, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 0, + 61: 0, + 34: 0, + 17: 0, + 30: 0, + 41: 0, + 6: 0, + 40: 0, + 19: 0}, + 43:{ + 23: 0, + 37: 0, + 47: 1, + 39: 0, + 29: 0, + 52: 2, + 36: 0, + 45: 1, + 53: 1, + 60: 0, + 16: 2, + 49: 0, + 20: 0, + 46: 2, + 42: 0, + 48: 2, + 44: 1, + 35: 1, + 31: 0, + 51: 1, + 38: 2, + 62: 0, + 43: 0, + 56: 0, + 1: 3, + 21: 0, + 28: 2, + 12: 0, + 2: 2, + 18: 0, + 27: 0, + 25: 0, + 3: 0, + 24: 1, + 10: 1, + 5: 1, + 13: 3, + 4: 0, + 15: 2, + 26: 0, + 7: 0, + 8: 0, + 9: 0, + 14: 3, + 32: 0, + 57: 0, + 58: 1, + 11: 0, + 22: 2, + 63: 0, + 54: 1, + 50: 2, + 55: 1, + 59: 1, + 33: 0, + 61: 0, + 34: 1, + 17: 0, + 30: 1, + 41: 1, + 6: 0, + 40: 2, + 19: 
1}, + 56:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 0, + 49: 0, + 20: 0, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 0, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 2, + 1: 2, + 21: 1, + 28: 0, + 12: 0, + 2: 2, + 18: 0, + 27: 0, + 25: 0, + 3: 2, + 24: 1, + 10: 0, + 5: 0, + 13: 1, + 4: 1, + 15: 0, + 26: 0, + 7: 1, + 8: 1, + 9: 0, + 14: 2, + 32: 0, + 57: 0, + 58: 0, + 11: 0, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 0, + 61: 0, + 34: 0, + 17: 1, + 30: 0, + 41: 0, + 6: 1, + 40: 0, + 19: 0}, + 1:{ + 23: 3, + 37: 0, + 47: 1, + 39: 0, + 29: 3, + 52: 0, + 36: 1, + 45: 1, + 53: 0, + 60: 0, + 16: 0, + 49: 0, + 20: 3, + 46: 1, + 42: 0, + 48: 1, + 44: 0, + 35: 0, + 31: 3, + 51: 0, + 38: 1, + 62: 0, + 43: 0, + 56: 2, + 1: 2, + 21: 3, + 28: 0, + 12: 3, + 2: 2, + 18: 3, + 27: 3, + 25: 3, + 3: 3, + 24: 3, + 10: 3, + 5: 0, + 13: 2, + 4: 3, + 15: 1, + 26: 3, + 7: 3, + 8: 3, + 9: 3, + 14: 3, + 32: 3, + 57: 2, + 58: 0, + 11: 3, + 22: 0, + 63: 1, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 1, + 61: 1, + 34: 1, + 17: 3, + 30: 0, + 41: 0, + 6: 3, + 40: 0, + 19: 1}, + 21:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 1, + 45: 0, + 53: 0, + 60: 1, + 16: 2, + 49: 0, + 20: 2, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 1, + 51: 0, + 38: 0, + 62: 0, + 43: 1, + 56: 0, + 1: 3, + 21: 2, + 28: 0, + 12: 3, + 2: 3, + 18: 0, + 27: 3, + 25: 1, + 3: 3, + 24: 2, + 10: 3, + 5: 3, + 13: 3, + 4: 3, + 15: 0, + 26: 3, + 7: 1, + 8: 2, + 9: 2, + 14: 2, + 32: 1, + 57: 0, + 58: 1, + 11: 3, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 1, + 61: 0, + 34: 0, + 17: 0, + 30: 1, + 41: 0, + 6: 2, + 40: 0, + 19: 0}, + 28:{ + 23: 0, + 37: 1, + 47: 1, + 39: 1, + 29: 2, + 52: 0, + 36: 2, + 45: 2, + 53: 1, + 60: 0, + 16: 0, + 49: 0, + 20: 2, + 46: 1, + 42: 1, + 48: 2, + 44: 1, + 35: 1, + 31: 2, + 51: 2, + 38: 2, + 62: 0, + 43: 3, + 56: 0, + 1: 1, + 21: 1, + 28: 2, + 12: 2, + 2: 1, + 18: 1, + 27: 2, + 25: 2, + 3: 
3, + 24: 1, + 10: 3, + 5: 0, + 13: 2, + 4: 3, + 15: 2, + 26: 2, + 7: 3, + 8: 3, + 9: 3, + 14: 1, + 32: 0, + 57: 1, + 58: 0, + 11: 2, + 22: 1, + 63: 1, + 54: 0, + 50: 0, + 55: 1, + 59: 0, + 33: 0, + 61: 1, + 34: 2, + 17: 2, + 30: 2, + 41: 1, + 6: 3, + 40: 0, + 19: 2}, + 12:{ + 23: 1, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 2, + 16: 3, + 49: 0, + 20: 3, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 1, + 31: 1, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 0, + 1: 3, + 21: 2, + 28: 1, + 12: 3, + 2: 3, + 18: 1, + 27: 3, + 25: 3, + 3: 2, + 24: 3, + 10: 2, + 5: 3, + 13: 3, + 4: 3, + 15: 1, + 26: 2, + 7: 3, + 8: 2, + 9: 2, + 14: 3, + 32: 1, + 57: 0, + 58: 1, + 11: 3, + 22: 1, + 63: 1, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 0, + 61: 0, + 34: 0, + 17: 1, + 30: 0, + 41: 0, + 6: 2, + 40: 0, + 19: 0}, + 2:{ + 23: 2, + 37: 0, + 47: 2, + 39: 0, + 29: 3, + 52: 1, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 1, + 49: 0, + 20: 3, + 46: 1, + 42: 0, + 48: 1, + 44: 1, + 35: 0, + 31: 3, + 51: 0, + 38: 1, + 62: 0, + 43: 1, + 56: 0, + 1: 3, + 21: 3, + 28: 0, + 12: 3, + 2: 2, + 18: 3, + 27: 3, + 25: 3, + 3: 3, + 24: 3, + 10: 3, + 5: 0, + 13: 2, + 4: 3, + 15: 1, + 26: 3, + 7: 3, + 8: 3, + 9: 3, + 14: 3, + 32: 3, + 57: 2, + 58: 0, + 11: 3, + 22: 1, + 63: 1, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 1, + 61: 0, + 34: 1, + 17: 3, + 30: 0, + 41: 0, + 6: 3, + 40: 0, + 19: 0}, + 18:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 2, + 49: 0, + 20: 2, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 2, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 0, + 1: 3, + 21: 1, + 28: 0, + 12: 3, + 2: 3, + 18: 2, + 27: 1, + 25: 1, + 3: 1, + 24: 1, + 10: 1, + 5: 3, + 13: 3, + 4: 3, + 15: 0, + 26: 2, + 7: 1, + 8: 3, + 9: 3, + 14: 1, + 32: 2, + 57: 0, + 58: 0, + 11: 1, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 1, + 61: 0, + 34: 0, + 17: 1, + 30: 0, + 41: 0, + 6: 1, + 40: 0, + 19: 0}, + 27:{ + 23: 0, + 37: 0, + 47: 0, 
+ 39: 0, + 29: 0, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 3, + 49: 0, + 20: 0, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 1, + 31: 1, + 51: 0, + 38: 2, + 62: 0, + 43: 0, + 56: 0, + 1: 3, + 21: 1, + 28: 0, + 12: 1, + 2: 3, + 18: 0, + 27: 2, + 25: 1, + 3: 2, + 24: 3, + 10: 2, + 5: 3, + 13: 3, + 4: 2, + 15: 0, + 26: 1, + 7: 2, + 8: 2, + 9: 3, + 14: 3, + 32: 1, + 57: 0, + 58: 0, + 11: 1, + 22: 0, + 63: 1, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 0, + 61: 0, + 34: 0, + 17: 0, + 30: 0, + 41: 0, + 6: 2, + 40: 0, + 19: 0}, + 25:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 2, + 49: 0, + 20: 0, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 0, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 0, + 1: 3, + 21: 0, + 28: 0, + 12: 2, + 2: 3, + 18: 0, + 27: 1, + 25: 2, + 3: 2, + 24: 3, + 10: 3, + 5: 3, + 13: 3, + 4: 3, + 15: 1, + 26: 1, + 7: 3, + 8: 3, + 9: 2, + 14: 3, + 32: 2, + 57: 1, + 58: 0, + 11: 1, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 0, + 61: 0, + 34: 0, + 17: 0, + 30: 0, + 41: 0, + 6: 3, + 40: 0, + 19: 0}, + 3:{ + 23: 2, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 1, + 16: 3, + 49: 0, + 20: 3, + 46: 0, + 42: 1, + 48: 0, + 44: 0, + 35: 1, + 31: 2, + 51: 0, + 38: 1, + 62: 0, + 43: 0, + 56: 0, + 1: 3, + 21: 2, + 28: 0, + 12: 3, + 2: 3, + 18: 2, + 27: 3, + 25: 1, + 3: 3, + 24: 2, + 10: 3, + 5: 3, + 13: 3, + 4: 3, + 15: 1, + 26: 3, + 7: 3, + 8: 3, + 9: 3, + 14: 3, + 32: 2, + 57: 1, + 58: 1, + 11: 3, + 22: 1, + 63: 1, + 54: 0, + 50: 0, + 55: 1, + 59: 0, + 33: 2, + 61: 0, + 34: 0, + 17: 3, + 30: 0, + 41: 1, + 6: 2, + 40: 0, + 19: 0}, + 24:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 1, + 16: 2, + 49: 0, + 20: 2, + 46: 0, + 42: 0, + 48: 1, + 44: 0, + 35: 0, + 31: 1, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 1, + 1: 3, + 21: 1, + 28: 1, + 12: 3, + 2: 3, + 18: 2, + 27: 1, + 25: 1, + 3: 2, + 24: 1, + 10: 2, + 5: 2, + 13: 3, 
+ 4: 2, + 15: 0, + 26: 1, + 7: 2, + 8: 3, + 9: 2, + 14: 3, + 32: 2, + 57: 0, + 58: 2, + 11: 1, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 1, + 61: 0, + 34: 0, + 17: 1, + 30: 0, + 41: 0, + 6: 3, + 40: 0, + 19: 0}, + 10:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 3, + 49: 0, + 20: 2, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 3, + 51: 0, + 38: 1, + 62: 0, + 43: 0, + 56: 1, + 1: 3, + 21: 2, + 28: 0, + 12: 2, + 2: 3, + 18: 1, + 27: 2, + 25: 2, + 3: 3, + 24: 2, + 10: 2, + 5: 3, + 13: 3, + 4: 3, + 15: 0, + 26: 3, + 7: 2, + 8: 2, + 9: 2, + 14: 3, + 32: 0, + 57: 0, + 58: 1, + 11: 3, + 22: 0, + 63: 1, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 3, + 61: 0, + 34: 1, + 17: 3, + 30: 1, + 41: 0, + 6: 3, + 40: 0, + 19: 1}, + 5:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 3, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 0, + 49: 0, + 20: 2, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 1, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 0, + 1: 0, + 21: 3, + 28: 0, + 12: 3, + 2: 1, + 18: 3, + 27: 3, + 25: 2, + 3: 3, + 24: 2, + 10: 3, + 5: 1, + 13: 1, + 4: 3, + 15: 0, + 26: 2, + 7: 3, + 8: 3, + 9: 3, + 14: 2, + 32: 2, + 57: 0, + 58: 0, + 11: 3, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 1, + 61: 0, + 34: 0, + 17: 2, + 30: 0, + 41: 0, + 6: 3, + 40: 0, + 19: 0}, + 13:{ + 23: 1, + 37: 0, + 47: 0, + 39: 0, + 29: 3, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 0, + 49: 0, + 20: 3, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 3, + 51: 0, + 38: 0, + 62: 0, + 43: 1, + 56: 0, + 1: 2, + 21: 3, + 28: 0, + 12: 3, + 2: 2, + 18: 3, + 27: 3, + 25: 3, + 3: 3, + 24: 3, + 10: 3, + 5: 0, + 13: 2, + 4: 3, + 15: 1, + 26: 2, + 7: 3, + 8: 3, + 9: 3, + 14: 2, + 32: 2, + 57: 1, + 58: 0, + 11: 3, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 0, + 61: 0, + 34: 0, + 17: 3, + 30: 0, + 41: 0, + 6: 3, + 40: 0, + 19: 1}, + 4:{ + 23: 1, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 0, + 
45: 1, + 53: 0, + 60: 2, + 16: 3, + 49: 0, + 20: 3, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 2, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 0, + 1: 3, + 21: 2, + 28: 1, + 12: 3, + 2: 3, + 18: 1, + 27: 2, + 25: 3, + 3: 2, + 24: 2, + 10: 3, + 5: 3, + 13: 3, + 4: 3, + 15: 1, + 26: 3, + 7: 2, + 8: 3, + 9: 3, + 14: 3, + 32: 2, + 57: 0, + 58: 2, + 11: 3, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 1, + 61: 0, + 34: 0, + 17: 2, + 30: 0, + 41: 0, + 6: 1, + 40: 0, + 19: 0}, + 15:{ + 23: 0, + 37: 0, + 47: 1, + 39: 0, + 29: 0, + 52: 2, + 36: 1, + 45: 1, + 53: 1, + 60: 0, + 16: 3, + 49: 2, + 20: 0, + 46: 2, + 42: 1, + 48: 2, + 44: 1, + 35: 0, + 31: 0, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 0, + 1: 3, + 21: 0, + 28: 2, + 12: 0, + 2: 3, + 18: 0, + 27: 0, + 25: 0, + 3: 1, + 24: 2, + 10: 1, + 5: 3, + 13: 3, + 4: 2, + 15: 2, + 26: 0, + 7: 1, + 8: 0, + 9: 0, + 14: 3, + 32: 0, + 57: 0, + 58: 2, + 11: 0, + 22: 2, + 63: 0, + 54: 1, + 50: 2, + 55: 0, + 59: 0, + 33: 3, + 61: 0, + 34: 1, + 17: 0, + 30: 2, + 41: 2, + 6: 3, + 40: 2, + 19: 2}, + 26:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 3, + 49: 0, + 20: 1, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 0, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 0, + 1: 3, + 21: 1, + 28: 0, + 12: 1, + 2: 3, + 18: 0, + 27: 1, + 25: 1, + 3: 2, + 24: 3, + 10: 1, + 5: 3, + 13: 3, + 4: 2, + 15: 0, + 26: 2, + 7: 2, + 8: 1, + 9: 1, + 14: 3, + 32: 0, + 57: 0, + 58: 1, + 11: 1, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 3, + 61: 0, + 34: 0, + 17: 1, + 30: 0, + 41: 0, + 6: 3, + 40: 0, + 19: 0}, + 7:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 1, + 36: 0, + 45: 0, + 53: 0, + 60: 2, + 16: 3, + 49: 0, + 20: 2, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 2, + 51: 1, + 38: 0, + 62: 0, + 43: 0, + 56: 1, + 1: 3, + 21: 1, + 28: 0, + 12: 3, + 2: 3, + 18: 0, + 27: 2, + 25: 3, + 3: 2, + 24: 2, + 10: 3, + 5: 3, + 13: 3, + 4: 3, + 15: 0, + 26: 2, + 7: 3, + 8: 
3, + 9: 3, + 14: 3, + 32: 2, + 57: 0, + 58: 1, + 11: 2, + 22: 0, + 63: 1, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 2, + 61: 0, + 34: 0, + 17: 3, + 30: 0, + 41: 0, + 6: 2, + 40: 0, + 19: 0}, + 8:{ + 23: 1, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 1, + 45: 0, + 53: 0, + 60: 0, + 16: 3, + 49: 0, + 20: 3, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 2, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 1, + 1: 3, + 21: 2, + 28: 1, + 12: 3, + 2: 3, + 18: 0, + 27: 2, + 25: 2, + 3: 2, + 24: 3, + 10: 3, + 5: 3, + 13: 3, + 4: 3, + 15: 0, + 26: 3, + 7: 3, + 8: 3, + 9: 3, + 14: 3, + 32: 2, + 57: 0, + 58: 1, + 11: 2, + 22: 1, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 2, + 61: 0, + 34: 0, + 17: 2, + 30: 0, + 41: 0, + 6: 3, + 40: 0, + 19: 1}, + 9:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 1, + 16: 3, + 49: 0, + 20: 2, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 2, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 1, + 1: 3, + 21: 3, + 28: 0, + 12: 3, + 2: 3, + 18: 2, + 27: 2, + 25: 2, + 3: 2, + 24: 2, + 10: 3, + 5: 3, + 13: 3, + 4: 3, + 15: 0, + 26: 2, + 7: 3, + 8: 3, + 9: 3, + 14: 3, + 32: 3, + 57: 0, + 58: 2, + 11: 2, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 3, + 61: 0, + 34: 0, + 17: 2, + 30: 0, + 41: 0, + 6: 3, + 40: 0, + 19: 0}, + 14:{ + 23: 3, + 37: 0, + 47: 0, + 39: 0, + 29: 3, + 52: 0, + 36: 0, + 45: 1, + 53: 0, + 60: 1, + 16: 0, + 49: 0, + 20: 3, + 46: 2, + 42: 0, + 48: 1, + 44: 0, + 35: 0, + 31: 3, + 51: 0, + 38: 0, + 62: 0, + 43: 1, + 56: 2, + 1: 2, + 21: 3, + 28: 0, + 12: 3, + 2: 2, + 18: 2, + 27: 3, + 25: 3, + 3: 3, + 24: 2, + 10: 3, + 5: 0, + 13: 3, + 4: 3, + 15: 0, + 26: 3, + 7: 3, + 8: 3, + 9: 3, + 14: 3, + 32: 2, + 57: 2, + 58: 0, + 11: 3, + 22: 0, + 63: 1, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 0, + 61: 0, + 34: 0, + 17: 3, + 30: 1, + 41: 0, + 6: 3, + 40: 0, + 19: 0}, + 32:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 3, + 49: 
0, + 20: 1, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 0, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 0, + 1: 3, + 21: 0, + 28: 0, + 12: 3, + 2: 3, + 18: 0, + 27: 0, + 25: 0, + 3: 0, + 24: 1, + 10: 1, + 5: 3, + 13: 2, + 4: 3, + 15: 0, + 26: 1, + 7: 1, + 8: 2, + 9: 3, + 14: 3, + 32: 1, + 57: 0, + 58: 0, + 11: 0, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 2, + 61: 0, + 34: 0, + 17: 0, + 30: 0, + 41: 0, + 6: 1, + 40: 0, + 19: 0}, + 57:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 0, + 49: 0, + 20: 0, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 0, + 51: 1, + 38: 0, + 62: 0, + 43: 0, + 56: 0, + 1: 1, + 21: 0, + 28: 0, + 12: 0, + 2: 2, + 18: 0, + 27: 0, + 25: 1, + 3: 0, + 24: 0, + 10: 1, + 5: 0, + 13: 0, + 4: 1, + 15: 0, + 26: 0, + 7: 0, + 8: 1, + 9: 0, + 14: 1, + 32: 0, + 57: 2, + 58: 0, + 11: 0, + 22: 0, + 63: 1, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 0, + 61: 0, + 34: 0, + 17: 1, + 30: 0, + 41: 0, + 6: 0, + 40: 0, + 19: 0}, + 58:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 1, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 1, + 16: 0, + 49: 0, + 20: 1, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 0, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 0, + 1: 0, + 21: 1, + 28: 0, + 12: 2, + 2: 1, + 18: 0, + 27: 0, + 25: 0, + 3: 2, + 24: 2, + 10: 1, + 5: 0, + 13: 0, + 4: 2, + 15: 0, + 26: 0, + 7: 1, + 8: 2, + 9: 1, + 14: 0, + 32: 0, + 57: 0, + 58: 0, + 11: 2, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 0, + 61: 0, + 34: 0, + 17: 1, + 30: 0, + 41: 0, + 6: 2, + 40: 0, + 19: 0}, + 11:{ + 23: 1, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 1, + 16: 3, + 49: 0, + 20: 1, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 1, + 51: 0, + 38: 0, + 62: 0, + 43: 1, + 56: 1, + 1: 3, + 21: 1, + 28: 0, + 12: 2, + 2: 3, + 18: 0, + 27: 2, + 25: 2, + 3: 2, + 24: 1, + 10: 2, + 5: 3, + 13: 3, + 4: 3, + 15: 0, + 26: 1, + 7: 2, + 8: 1, + 9: 2, + 14: 3, + 32: 0, + 57: 0, 
+ 58: 1, + 11: 3, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 3, + 61: 0, + 34: 0, + 17: 2, + 30: 0, + 41: 0, + 6: 3, + 40: 0, + 19: 0}, + 22:{ + 23: 2, + 37: 2, + 47: 1, + 39: 2, + 29: 3, + 52: 1, + 36: 2, + 45: 2, + 53: 1, + 60: 0, + 16: 0, + 49: 0, + 20: 3, + 46: 2, + 42: 2, + 48: 2, + 44: 1, + 35: 1, + 31: 3, + 51: 2, + 38: 2, + 62: 0, + 43: 2, + 56: 1, + 1: 1, + 21: 2, + 28: 1, + 12: 2, + 2: 2, + 18: 3, + 27: 2, + 25: 2, + 3: 3, + 24: 2, + 10: 3, + 5: 0, + 13: 2, + 4: 3, + 15: 2, + 26: 2, + 7: 3, + 8: 3, + 9: 3, + 14: 0, + 32: 2, + 57: 0, + 58: 0, + 11: 3, + 22: 2, + 63: 1, + 54: 0, + 50: 0, + 55: 2, + 59: 1, + 33: 0, + 61: 0, + 34: 2, + 17: 2, + 30: 2, + 41: 1, + 6: 3, + 40: 1, + 19: 2}, + 63:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 0, + 49: 0, + 20: 0, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 0, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 0, + 1: 0, + 21: 0, + 28: 0, + 12: 0, + 2: 1, + 18: 0, + 27: 0, + 25: 0, + 3: 0, + 24: 0, + 10: 0, + 5: 0, + 13: 2, + 4: 0, + 15: 0, + 26: 0, + 7: 0, + 8: 0, + 9: 0, + 14: 2, + 32: 0, + 57: 0, + 58: 0, + 11: 0, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 0, + 61: 0, + 34: 0, + 17: 0, + 30: 0, + 41: 0, + 6: 0, + 40: 0, + 19: 0}, + 54:{ + 23: 0, + 37: 0, + 47: 1, + 39: 1, + 29: 0, + 52: 0, + 36: 1, + 45: 1, + 53: 1, + 60: 0, + 16: 0, + 49: 0, + 20: 0, + 46: 0, + 42: 1, + 48: 1, + 44: 0, + 35: 0, + 31: 0, + 51: 1, + 38: 1, + 62: 0, + 43: 2, + 56: 0, + 1: 0, + 21: 1, + 28: 0, + 12: 1, + 2: 0, + 18: 0, + 27: 1, + 25: 0, + 3: 3, + 24: 0, + 10: 1, + 5: 0, + 13: 0, + 4: 2, + 15: 1, + 26: 0, + 7: 2, + 8: 0, + 9: 1, + 14: 0, + 32: 2, + 57: 0, + 58: 0, + 11: 0, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 2, + 59: 0, + 33: 0, + 61: 0, + 34: 1, + 17: 0, + 30: 0, + 41: 0, + 6: 2, + 40: 0, + 19: 1}, + 50:{ + 23: 0, + 37: 0, + 47: 1, + 39: 1, + 29: 2, + 52: 0, + 36: 1, + 45: 2, + 53: 0, + 60: 0, + 16: 0, + 49: 0, + 20: 1, + 46: 1, + 42: 2, + 48: 
2, + 44: 1, + 35: 0, + 31: 0, + 51: 1, + 38: 1, + 62: 0, + 43: 2, + 56: 0, + 1: 0, + 21: 2, + 28: 1, + 12: 2, + 2: 0, + 18: 1, + 27: 1, + 25: 1, + 3: 2, + 24: 0, + 10: 2, + 5: 0, + 13: 0, + 4: 3, + 15: 2, + 26: 2, + 7: 3, + 8: 1, + 9: 2, + 14: 0, + 32: 1, + 57: 0, + 58: 0, + 11: 0, + 22: 1, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 0, + 61: 0, + 34: 2, + 17: 2, + 30: 1, + 41: 0, + 6: 2, + 40: 0, + 19: 1}, + 55:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 2, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 1, + 49: 0, + 20: 0, + 46: 0, + 42: 0, + 48: 1, + 44: 0, + 35: 0, + 31: 0, + 51: 0, + 38: 1, + 62: 0, + 43: 0, + 56: 0, + 1: 2, + 21: 0, + 28: 2, + 12: 0, + 2: 2, + 18: 0, + 27: 1, + 25: 0, + 3: 0, + 24: 0, + 10: 0, + 5: 1, + 13: 1, + 4: 1, + 15: 0, + 26: 0, + 7: 0, + 8: 0, + 9: 1, + 14: 2, + 32: 0, + 57: 0, + 58: 0, + 11: 0, + 22: 1, + 63: 0, + 54: 0, + 50: 1, + 55: 0, + 59: 0, + 33: 0, + 61: 0, + 34: 1, + 17: 0, + 30: 1, + 41: 1, + 6: 0, + 40: 0, + 19: 1}, + 59:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 1, + 45: 0, + 53: 0, + 60: 0, + 16: 1, + 49: 0, + 20: 0, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 0, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 0, + 1: 2, + 21: 0, + 28: 0, + 12: 0, + 2: 2, + 18: 0, + 27: 0, + 25: 0, + 3: 0, + 24: 0, + 10: 0, + 5: 0, + 13: 2, + 4: 0, + 15: 1, + 26: 0, + 7: 0, + 8: 0, + 9: 0, + 14: 2, + 32: 0, + 57: 0, + 58: 0, + 11: 0, + 22: 1, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 0, + 61: 0, + 34: 0, + 17: 0, + 30: 0, + 41: 0, + 6: 1, + 40: 1, + 19: 0}, + 33:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 3, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 0, + 49: 0, + 20: 1, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 2, + 51: 0, + 38: 1, + 62: 0, + 43: 0, + 56: 0, + 1: 0, + 21: 3, + 28: 0, + 12: 2, + 2: 0, + 18: 2, + 27: 1, + 25: 3, + 3: 3, + 24: 0, + 10: 3, + 5: 0, + 13: 0, + 4: 3, + 15: 0, + 26: 1, + 7: 3, + 8: 2, + 9: 3, + 14: 0, + 32: 2, + 57: 0, + 58: 0, + 11: 2, + 22: 0, + 63: 0, 
+ 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 0, + 61: 0, + 34: 0, + 17: 1, + 30: 0, + 41: 0, + 6: 3, + 40: 0, + 19: 0}, + 61:{ + 23: 0, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 0, + 16: 0, + 49: 0, + 20: 0, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 0, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 1, + 1: 2, + 21: 0, + 28: 0, + 12: 0, + 2: 2, + 18: 0, + 27: 0, + 25: 0, + 3: 0, + 24: 1, + 10: 0, + 5: 0, + 13: 1, + 4: 1, + 15: 0, + 26: 0, + 7: 0, + 8: 0, + 9: 0, + 14: 1, + 32: 0, + 57: 0, + 58: 0, + 11: 0, + 22: 1, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 0, + 61: 1, + 34: 0, + 17: 0, + 30: 0, + 41: 0, + 6: 1, + 40: 0, + 19: 0}, + 34:{ + 23: 0, + 37: 1, + 47: 1, + 39: 0, + 29: 0, + 52: 2, + 36: 1, + 45: 1, + 53: 0, + 60: 0, + 16: 3, + 49: 1, + 20: 0, + 46: 1, + 42: 1, + 48: 2, + 44: 1, + 35: 1, + 31: 1, + 51: 1, + 38: 1, + 62: 0, + 43: 0, + 56: 1, + 1: 3, + 21: 1, + 28: 2, + 12: 1, + 2: 3, + 18: 0, + 27: 2, + 25: 2, + 3: 1, + 24: 2, + 10: 1, + 5: 2, + 13: 3, + 4: 2, + 15: 2, + 26: 0, + 7: 0, + 8: 3, + 9: 1, + 14: 3, + 32: 0, + 57: 0, + 58: 0, + 11: 1, + 22: 2, + 63: 0, + 54: 1, + 50: 2, + 55: 0, + 59: 0, + 33: 2, + 61: 0, + 34: 2, + 17: 0, + 30: 2, + 41: 1, + 6: 1, + 40: 2, + 19: 1}, + 17:{ + 23: 0, + 37: 0, + 47: 1, + 39: 0, + 29: 0, + 52: 0, + 36: 0, + 45: 0, + 53: 0, + 60: 1, + 16: 1, + 49: 0, + 20: 1, + 46: 0, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 1, + 51: 0, + 38: 0, + 62: 0, + 43: 0, + 56: 1, + 1: 3, + 21: 0, + 28: 0, + 12: 1, + 2: 3, + 18: 1, + 27: 2, + 25: 0, + 3: 1, + 24: 1, + 10: 2, + 5: 3, + 13: 2, + 4: 3, + 15: 0, + 26: 2, + 7: 2, + 8: 3, + 9: 2, + 14: 3, + 32: 1, + 57: 1, + 58: 0, + 11: 0, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 1, + 61: 0, + 34: 0, + 17: 2, + 30: 0, + 41: 0, + 6: 2, + 40: 0, + 19: 0}, + 30:{ + 23: 0, + 37: 2, + 47: 1, + 39: 0, + 29: 0, + 52: 2, + 36: 1, + 45: 0, + 53: 1, + 60: 0, + 16: 3, + 49: 0, + 20: 1, + 46: 2, + 42: 2, + 48: 1, + 44: 1, + 35: 0, + 31: 1, + 51: 
0, + 38: 2, + 62: 0, + 43: 2, + 56: 0, + 1: 3, + 21: 0, + 28: 2, + 12: 0, + 2: 2, + 18: 0, + 27: 0, + 25: 0, + 3: 0, + 24: 3, + 10: 1, + 5: 2, + 13: 3, + 4: 0, + 15: 1, + 26: 0, + 7: 1, + 8: 0, + 9: 0, + 14: 3, + 32: 0, + 57: 0, + 58: 0, + 11: 0, + 22: 2, + 63: 0, + 54: 2, + 50: 2, + 55: 0, + 59: 0, + 33: 1, + 61: 0, + 34: 2, + 17: 0, + 30: 1, + 41: 2, + 6: 2, + 40: 2, + 19: 1}, + 41:{ + 23: 0, + 37: 0, + 47: 1, + 39: 1, + 29: 1, + 52: 0, + 36: 2, + 45: 2, + 53: 0, + 60: 0, + 16: 0, + 49: 0, + 20: 2, + 46: 1, + 42: 1, + 48: 2, + 44: 0, + 35: 1, + 31: 1, + 51: 1, + 38: 1, + 62: 0, + 43: 2, + 56: 0, + 1: 1, + 21: 2, + 28: 1, + 12: 2, + 2: 1, + 18: 0, + 27: 3, + 25: 2, + 3: 2, + 24: 2, + 10: 2, + 5: 0, + 13: 1, + 4: 3, + 15: 1, + 26: 1, + 7: 3, + 8: 3, + 9: 2, + 14: 0, + 32: 0, + 57: 1, + 58: 0, + 11: 2, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 1, + 59: 1, + 33: 0, + 61: 0, + 34: 1, + 17: 1, + 30: 2, + 41: 0, + 6: 3, + 40: 0, + 19: 1}, + 6:{ + 23: 2, + 37: 0, + 47: 0, + 39: 0, + 29: 0, + 52: 0, + 36: 1, + 45: 0, + 53: 0, + 60: 2, + 16: 3, + 49: 0, + 20: 3, + 46: 1, + 42: 0, + 48: 0, + 44: 0, + 35: 0, + 31: 2, + 51: 0, + 38: 0, + 62: 0, + 43: 2, + 56: 1, + 1: 3, + 21: 2, + 28: 1, + 12: 3, + 2: 3, + 18: 3, + 27: 3, + 25: 2, + 3: 3, + 24: 3, + 10: 3, + 5: 3, + 13: 3, + 4: 3, + 15: 0, + 26: 3, + 7: 3, + 8: 3, + 9: 3, + 14: 3, + 32: 3, + 57: 1, + 58: 1, + 11: 3, + 22: 0, + 63: 1, + 54: 0, + 50: 0, + 55: 0, + 59: 0, + 33: 2, + 61: 0, + 34: 0, + 17: 3, + 30: 0, + 41: 0, + 6: 3, + 40: 0, + 19: 0}, + 40:{ + 23: 0, + 37: 0, + 47: 1, + 39: 1, + 29: 1, + 52: 0, + 36: 1, + 45: 2, + 53: 1, + 60: 0, + 16: 0, + 49: 0, + 20: 2, + 46: 1, + 42: 1, + 48: 2, + 44: 2, + 35: 1, + 31: 1, + 51: 0, + 38: 1, + 62: 0, + 43: 2, + 56: 1, + 1: 0, + 21: 2, + 28: 0, + 12: 2, + 2: 0, + 18: 3, + 27: 0, + 25: 2, + 3: 3, + 24: 2, + 10: 1, + 5: 0, + 13: 1, + 4: 3, + 15: 2, + 26: 0, + 7: 3, + 8: 2, + 9: 2, + 14: 1, + 32: 3, + 57: 0, + 58: 0, + 11: 2, + 22: 0, + 63: 0, + 54: 0, + 50: 0, + 55: 1, + 59: 0, + 
33: 0, + 61: 0, + 34: 2, + 17: 1, + 30: 2, + 41: 0, + 6: 2, + 40: 1, + 19: 2}, + 19:{ + 23: 0, + 37: 0, + 47: 1, + 39: 0, + 29: 0, + 52: 2, + 36: 1, + 45: 0, + 53: 0, + 60: 0, + 16: 3, + 49: 2, + 20: 0, + 46: 1, + 42: 1, + 48: 1, + 44: 1, + 35: 1, + 31: 0, + 51: 1, + 38: 1, + 62: 0, + 43: 1, + 56: 0, + 1: 3, + 21: 1, + 28: 2, + 12: 0, + 2: 3, + 18: 0, + 27: 2, + 25: 1, + 3: 1, + 24: 0, + 10: 2, + 5: 2, + 13: 3, + 4: 0, + 15: 0, + 26: 1, + 7: 3, + 8: 0, + 9: 0, + 14: 3, + 32: 0, + 57: 0, + 58: 0, + 11: 0, + 22: 2, + 63: 0, + 54: 1, + 50: 2, + 55: 0, + 59: 0, + 33: 1, + 61: 1, + 34: 2, + 17: 0, + 30: 1, + 41: 1, + 6: 1, + 40: 1, + 19: 1}} +ISO_8859_9_TURKISH_CHAR_TO_ORDER = { + 0: 255, + 1: 255, + 2: 255, + 3: 255, + 4: 255, + 5: 255, + 6: 255, + 7: 255, + 8: 255, + 9: 255, + 10: 255, + 11: 255, + 12: 255, + 13: 255, + 14: 255, + 15: 255, + 16: 255, + 17: 255, + 18: 255, + 19: 255, + 20: 255, + 21: 255, + 22: 255, + 23: 255, + 24: 255, + 25: 255, + 26: 255, + 27: 255, + 28: 255, + 29: 255, + 30: 255, + 31: 255, + 32: 255, + 33: 255, + 34: 255, + 35: 255, + 36: 255, + 37: 255, + 38: 255, + 39: 255, + 40: 255, + 41: 255, + 42: 255, + 43: 255, + 44: 255, + 45: 255, + 46: 255, + 47: 255, + 48: 255, + 49: 255, + 50: 255, + 51: 255, + 52: 255, + 53: 255, + 54: 255, + 55: 255, + 56: 255, + 57: 255, + 58: 255, + 59: 255, + 60: 255, + 61: 255, + 62: 255, + 63: 255, + 64: 255, + 65: 23, + 66: 37, + 67: 47, + 68: 39, + 69: 29, + 70: 52, + 71: 36, + 72: 45, + 73: 53, + 74: 60, + 75: 16, + 76: 49, + 77: 20, + 78: 46, + 79: 42, + 80: 48, + 81: 69, + 82: 44, + 83: 35, + 84: 31, + 85: 51, + 86: 38, + 87: 62, + 88: 65, + 89: 43, + 90: 56, + 91: 255, + 92: 255, + 93: 255, + 94: 255, + 95: 255, + 96: 255, + 97: 1, + 98: 21, + 99: 28, + 100: 12, + 101: 2, + 102: 18, + 103: 27, + 104: 25, + 105: 3, + 106: 24, + 107: 10, + 108: 5, + 109: 13, + 110: 4, + 111: 15, + 112: 26, + 113: 64, + 114: 7, + 115: 8, + 116: 9, + 117: 14, + 118: 32, + 119: 57, + 120: 58, + 121: 11, + 122: 22, + 123: 
255, + 124: 255, + 125: 255, + 126: 255, + 127: 255, + 128: 180, + 129: 179, + 130: 178, + 131: 177, + 132: 176, + 133: 175, + 134: 174, + 135: 173, + 136: 172, + 137: 171, + 138: 170, + 139: 169, + 140: 168, + 141: 167, + 142: 166, + 143: 165, + 144: 164, + 145: 163, + 146: 162, + 147: 161, + 148: 160, + 149: 159, + 150: 101, + 151: 158, + 152: 157, + 153: 156, + 154: 155, + 155: 154, + 156: 153, + 157: 152, + 158: 151, + 159: 106, + 160: 150, + 161: 149, + 162: 148, + 163: 147, + 164: 146, + 165: 145, + 166: 144, + 167: 100, + 168: 143, + 169: 142, + 170: 141, + 171: 140, + 172: 139, + 173: 138, + 174: 137, + 175: 136, + 176: 94, + 177: 80, + 178: 93, + 179: 135, + 180: 105, + 181: 134, + 182: 133, + 183: 63, + 184: 132, + 185: 131, + 186: 130, + 187: 129, + 188: 128, + 189: 127, + 190: 126, + 191: 125, + 192: 124, + 193: 104, + 194: 73, + 195: 99, + 196: 79, + 197: 85, + 198: 123, + 199: 54, + 200: 122, + 201: 98, + 202: 92, + 203: 121, + 204: 120, + 205: 91, + 206: 103, + 207: 119, + 208: 68, + 209: 118, + 210: 117, + 211: 97, + 212: 116, + 213: 115, + 214: 50, + 215: 90, + 216: 114, + 217: 113, + 218: 112, + 219: 111, + 220: 55, + 221: 41, + 222: 40, + 223: 86, + 224: 89, + 225: 70, + 226: 59, + 227: 78, + 228: 71, + 229: 82, + 230: 88, + 231: 33, + 232: 77, + 233: 66, + 234: 84, + 235: 83, + 236: 110, + 237: 75, + 238: 61, + 239: 96, + 240: 30, + 241: 67, + 242: 109, + 243: 74, + 244: 87, + 245: 102, + 246: 34, + 247: 95, + 248: 81, + 249: 108, + 250: 76, + 251: 72, + 252: 17, + 253: 6, + 254: 19, + 255: 107} +ISO_8859_9_TURKISH_MODEL = SingleByteCharSetModel(charset_name="ISO-8859-9", language="Turkish", + char_to_order_map=ISO_8859_9_TURKISH_CHAR_TO_ORDER, + language_model=TURKISH_LANG_MODEL, + typical_positive_ratio=0.97029, + keep_ascii_letters=True, + alphabet="ABCDEFGHIJKLMNOPRSTUVYZabcdefghijklmnoprstuvyzÂÇÎÖÛÜâçîöûüĞğİıŞş") diff --git a/APPS_UNCOMPILED/lib/chardet/latin1prober.py b/APPS_UNCOMPILED/lib/chardet/latin1prober.py new file mode 100644 index 
# --- begin file: APPS_UNCOMPILED/lib/chardet/latin1prober.py (new file) ---
# uncompyle6 version 3.9.2
# Python bytecode version base 3.7.0 (3394)
# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52)
# [Clang 14.0.6 ]
# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/latin1prober.py
# Compiled at: 2024-04-18 03:12:56
# Size of source mod 2**32: 5370 bytes
from .charsetprober import CharSetProber
from .enums import ProbingState

# Number of frequency categories used by the Latin-1 pair model below
# (categories 0..3; category 0 means "this class pair never occurs").
FREQ_CAT_NUM = 4

# Character classes for the Latin-1 heuristic.
UDF = 0  # undefined code point
OTH = 1  # other (controls, punctuation, digits, ...)
ASC = 2  # ASCII capital letter
ASS = 3  # ASCII small letter
ACV = 4  # accented capital vowel
ACO = 5  # accented capital other
ASV = 6  # accented small vowel
ASO = 7  # accented small other
CLASS_NUM = 8  # total number of classes above

# Maps each of the 256 byte values to one of the classes above
# (row comments give the byte range covered by each group of 8).
Latin1_CharToClass = (
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 00 - 07
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 08 - 0F
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 10 - 17
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 18 - 1F
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 20 - 27
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 28 - 2F
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 30 - 37
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 38 - 3F
    OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC,  # 40 - 47
    ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC,  # 48 - 4F
    ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC,  # 50 - 57
    ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH,  # 58 - 5F
    OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS,  # 60 - 67
    ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS,  # 68 - 6F
    ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS,  # 70 - 77
    ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH,  # 78 - 7F
    OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH,  # 80 - 87
    OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF,  # 88 - 8F
    UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 90 - 97
    OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO,  # 98 - 9F
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # A0 - A7
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # A8 - AF
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # B0 - B7
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # B8 - BF
    ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO,  # C0 - C7
    ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV,  # C8 - CF
    ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH,  # D0 - D7
    ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO,  # D8 - DF
    ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO,  # E0 - E7
    ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV,  # E8 - EF
    ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH,  # F0 - F7
    ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO,  # F8 - FF
)
Latin1ClassModel = (
    # UDF OTH ASC ASS ACV ACO ASV ASO   (previous class per row)
    0, 0, 0, 0, 0, 0, 0, 0,  # UDF
    0, 3, 3, 3, 3, 3, 3, 3,  # OTH
    0, 3, 3, 3, 3, 3, 3, 3,  # ASC
    0, 3, 3, 3, 1, 1, 3, 3,  # ASS
    0, 3, 3, 3, 1, 2, 1, 2,  # ACV
    0, 3, 3, 3, 3, 3, 3, 3,  # ACO
    0, 3, 1, 3, 1, 1, 1, 3,  # ASV
    0, 3, 1, 3, 1, 1, 3, 3,  # ASO
)


class Latin1Prober(CharSetProber):
    """Prober that decides whether input looks like ISO-8859-1 text.

    Each byte is mapped to a character class via ``Latin1_CharToClass``;
    the transition from the previous byte's class is scored against
    ``Latin1ClassModel``.  A model entry of 0 marks an impossible pair,
    which rules Latin-1 out immediately.
    """

    def __init__(self):
        super(Latin1Prober, self).__init__()
        self._last_char_class = None
        self._freq_counter = None
        self.reset()

    def reset(self):
        """Restore the prober to its pristine, pre-feed state."""
        self._last_char_class = OTH
        self._freq_counter = [0] * FREQ_CAT_NUM
        CharSetProber.reset(self)

    @property
    def charset_name(self):
        return "ISO-8859-1"

    @property
    def language(self):
        # Latin-1 covers many languages, so no single one is reported.
        return ""

    def feed(self, byte_str):
        """Consume *byte_str*, updating class-transition counters.

        Flips the state to NOT_ME as soon as an impossible transition
        is encountered; returns the resulting state.
        """
        filtered = self.filter_with_english_letters(byte_str)
        prev_class = self._last_char_class
        for byte in filtered:
            cur_class = Latin1_CharToClass[byte]
            freq = Latin1ClassModel[prev_class * CLASS_NUM + cur_class]
            if freq == 0:
                self._state = ProbingState.NOT_ME
                break
            self._freq_counter[freq] += 1
            prev_class = cur_class
        self._last_char_class = prev_class
        return self.state

    def get_confidence(self):
        """Return a confidence estimate in [0, 1], damped by 0.73."""
        if self.state == ProbingState.NOT_ME:
            return 0.01
        total = sum(self._freq_counter)
        if total < 0.01:
            return 0.0
        confidence = (self._freq_counter[3] - self._freq_counter[1] * 20.0) / total
        # Clamp at zero, then damp: Latin-1 plausibly matches many inputs
        # and should lose ties to more specific probers.
        return max(confidence, 0.0) * 0.73
from .charsetprober import CharSetProber
from .enums import ProbingState, MachineState


class MultiByteCharSetProber(CharSetProber):
    """Base class for probers of multi-byte encodings.

    Subclasses provide ``coding_sm`` (a byte-level coding state machine)
    and ``distribution_analyzer`` (character-frequency statistics); this
    class drives both over the input stream.
    """

    def __init__(self, lang_filter=None):
        super(MultiByteCharSetProber, self).__init__(lang_filter=lang_filter)
        self.distribution_analyzer = None
        self.coding_sm = None
        # Rolling two-byte window: byte carried over from the previous
        # feed() call plus the current byte.
        self._last_char = [0, 0]

    def reset(self):
        super(MultiByteCharSetProber, self).reset()
        if self.coding_sm:
            self.coding_sm.reset()
        if self.distribution_analyzer:
            self.distribution_analyzer.reset()
        self._last_char = [0, 0]

    @property
    def charset_name(self):
        raise NotImplementedError

    @property
    def language(self):
        raise NotImplementedError

    def feed(self, byte_str):
        """Feed *byte_str* through the state machine and analyzer.

        Returns the prober's state after consuming the input.
        """
        # Robustness: an empty buffer has nothing to feed and no last
        # byte to remember (the original raised IndexError on
        # byte_str[-1] for empty input).
        if not byte_str:
            return self.state
        for i in range(len(byte_str)):
            coding_state = self.coding_sm.next_state(byte_str[i])
            if coding_state == MachineState.ERROR:
                self.logger.debug(
                    "%s %s prober hit error at byte %s",
                    self.charset_name, self.language, i)
                self._state = ProbingState.NOT_ME
                break
            elif coding_state == MachineState.ITS_ME:
                self._state = ProbingState.FOUND_IT
                break
            elif coding_state == MachineState.START:
                char_len = self.coding_sm.get_current_charlen()
                if i == 0:
                    # First byte of this call: pair it with the byte
                    # carried over from the previous feed() call.
                    self._last_char[1] = byte_str[0]
                    self.distribution_analyzer.feed(self._last_char, char_len)
                else:
                    # BUGFIX: the decompiled source read
                    # byte_str[(i - 1)[:i + 1]], which subscripts an int
                    # and raises TypeError at runtime.  The intended form
                    # is the two-byte slice byte_str[i - 1:i + 1].
                    self.distribution_analyzer.feed(
                        byte_str[i - 1:i + 1], char_len)

        self._last_char[0] = byte_str[-1]
        if self.state == ProbingState.DETECTING:
            if self.distribution_analyzer.got_enough_data():
                if self.get_confidence() > self.SHORTCUT_THRESHOLD:
                    self._state = ProbingState.FOUND_IT
        return self.state

    def get_confidence(self):
        """Delegate confidence to the distribution analyzer."""
        return self.distribution_analyzer.get_confidence()
Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/mbcsgroupprober.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 2012 bytes +from .charsetgroupprober import CharSetGroupProber +from .utf8prober import UTF8Prober +from .sjisprober import SJISProber +from .eucjpprober import EUCJPProber +from .gb2312prober import GB2312Prober +from .euckrprober import EUCKRProber +from .cp949prober import CP949Prober +from .big5prober import Big5Prober +from .euctwprober import EUCTWProber + +class MBCSGroupProber(CharSetGroupProber): + + def __init__(self, lang_filter=None): + super(MBCSGroupProber, self).__init__(lang_filter=lang_filter) + self.probers = [ + UTF8Prober(), + SJISProber(), + EUCJPProber(), + GB2312Prober(), + EUCKRProber(), + CP949Prober(), + Big5Prober(), + EUCTWProber()] + self.reset() diff --git a/APPS_UNCOMPILED/lib/chardet/mbcssm.py b/APPS_UNCOMPILED/lib/chardet/mbcssm.py new file mode 100644 index 0000000..b2b717c --- /dev/null +++ b/APPS_UNCOMPILED/lib/chardet/mbcssm.py @@ -0,0 +1,280 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/mbcssm.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 25481 bytes +from .enums import MachineState +BIG5_CLS = (1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 3, + 3, 3, 3, 
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 0) +BIG5_ST = ( + MachineState.ERROR, MachineState.START, MachineState.START, 3, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ERROR, + MachineState.ERROR, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START) +BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0) +BIG5_SM_MODEL = { + 'class_table': BIG5_CLS, + 'class_factor': 5, + 'state_table': BIG5_ST, + 'char_len_table': BIG5_CHAR_LEN_TABLE, + 'name': '"Big5"'} +CP949_CLS = (1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 5, 1, 1, + 1, 1, 1, 1, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 1, 1, 1, 1, 1, 0, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, + 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 7, + 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 8, 8, 8, 7, 7, 7, 7, 7, 7, 7, 7, 7, + 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 9, 2, 2, 3, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 0) +CP949_ST = ( + MachineState.ERROR, MachineState.START, 3, MachineState.ERROR, MachineState.START, MachineState.START, 4, 5, MachineState.ERROR, 6, + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, 
MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, + MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, + MachineState.ERROR, MachineState.ERROR, MachineState.START, MachineState.START, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.START, MachineState.START, MachineState.START, + MachineState.ERROR, MachineState.ERROR, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, + MachineState.ERROR, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, + MachineState.ERROR, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.ERROR, MachineState.ERROR, MachineState.START, MachineState.START, MachineState.START) +CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) +CP949_SM_MODEL = { + 'class_table': CP949_CLS, + 'class_factor': 10, + 'state_table': CP949_ST, + 'char_len_table': CP949_CHAR_LEN_TABLE, + 'name': '"CP949"'} +EUCJP_CLS = (4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 5, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 1, 3, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 
2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 5) +EUCJP_ST = ( + 3, 4, 3, 5, MachineState.START, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, + MachineState.ITS_ME, MachineState.ITS_ME, MachineState.START, MachineState.ERROR, MachineState.START, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.START, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, 3, MachineState.ERROR, + 3, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.START, MachineState.START, MachineState.START, MachineState.START) +EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0) +EUCJP_SM_MODEL = { + 'class_table': EUCJP_CLS, + 'class_factor': 6, + 'state_table': EUCJP_ST, + 'char_len_table': EUCJP_CHAR_LEN_TABLE, + 'name': '"EUC-JP"'} +EUCKR_CLS = (1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 0) +EUCKR_ST = ( + MachineState.ERROR, MachineState.START, 3, MachineState.ERROR, MachineState.ERROR, 
MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, + MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, MachineState.START, MachineState.START) +EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0) +EUCKR_SM_MODEL = { + 'class_table': EUCKR_CLS, + 'class_factor': 4, + 'state_table': EUCKR_ST, + 'char_len_table': EUCKR_CHAR_LEN_TABLE, + 'name': '"EUC-KR"'} +EUCTW_CLS = (2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, + 4, 4, 4, 4, 4, 4, 5, 5, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 0) +EUCTW_ST = ( + MachineState.ERROR, MachineState.ERROR, MachineState.START, 3, 3, 3, 4, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ITS_ME, + MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ERROR, MachineState.START, MachineState.ERROR, + MachineState.START, MachineState.START, MachineState.START, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, + 5, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.START, MachineState.ERROR, MachineState.START, MachineState.START, + MachineState.START, 
MachineState.ERROR, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START) +EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3) +EUCTW_SM_MODEL = { + 'class_table': EUCTW_CLS, + 'class_factor': 7, + 'state_table': EUCTW_ST, + 'char_len_table': EUCTW_CHAR_LEN_TABLE, + 'name': '"x-euc-tw"'} +GB2312_CLS = (1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 4, 5, 6, 6, 6, 6, 6, 6, 6, 6, 6, + 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, + 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, + 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, + 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, + 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, + 6, 6, 0) +GB2312_ST = ( + MachineState.ERROR, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START, 3, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ITS_ME, + MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, MachineState.START, + 4, MachineState.ERROR, MachineState.START, MachineState.START, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, 5, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.START, 
MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.START) +GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2) +GB2312_SM_MODEL = { + 'class_table': GB2312_CLS, + 'class_factor': 7, + 'state_table': GB2312_ST, + 'char_len_table': GB2312_CHAR_LEN_TABLE, + 'name': '"GB2312"'} +SJIS_CLS = (1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 3, 3, 3, 3, 3, 2, 2, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 4, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, + 0, 0) +SJIS_ST = ( + MachineState.ERROR, MachineState.START, MachineState.START, 3, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, + MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ERROR, MachineState.ERROR, MachineState.START, MachineState.START, MachineState.START, MachineState.START) +SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0) +SJIS_SM_MODEL = { + 'class_table': SJIS_CLS, + 'class_factor': 6, + 'state_table': SJIS_ST, + 'char_len_table': SJIS_CHAR_LEN_TABLE, + 'name': '"Shift_JIS"'} +UCS2BE_CLS = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 4, 5) +UCS2BE_ST = ( + 5, 7, 7, MachineState.ERROR, 4, 3, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, + MachineState.ITS_ME, MachineState.ITS_ME, 6, 6, 6, 6, MachineState.ERROR, MachineState.ERROR, + 6, 6, 6, 6, 6, MachineState.ITS_ME, 6, 6, + 6, 6, 6, 6, 5, 7, 7, MachineState.ERROR, + 5, 8, 6, 6, MachineState.ERROR, 6, 6, 6, + 6, 6, 6, 6, MachineState.ERROR, MachineState.ERROR, MachineState.START, MachineState.START) +UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2) +UCS2BE_SM_MODEL = { + 'class_table': UCS2BE_CLS, + 'class_factor': 6, + 'state_table': UCS2BE_ST, + 'char_len_table': UCS2BE_CHAR_LEN_TABLE, + 'name': '"UTF-16BE"'} +UCS2LE_CLS = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 4, 5) +UCS2LE_ST = ( + 6, 6, 7, 6, 4, 3, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, + MachineState.ITS_ME, MachineState.ITS_ME, 5, 5, 5, MachineState.ERROR, MachineState.ITS_ME, MachineState.ERROR, + 5, 5, 5, MachineState.ERROR, 5, MachineState.ERROR, 6, 6, + 7, 6, 8, 8, 5, 5, 5, MachineState.ERROR, + 5, 5, 5, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, 5, 5, + 5, 5, 5, MachineState.ERROR, 5, MachineState.ERROR, MachineState.START, MachineState.START) +UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2) +UCS2LE_SM_MODEL = { + 'class_table': UCS2LE_CLS, + 'class_factor': 6, + 'state_table': UCS2LE_ST, + 'char_len_table': UCS2LE_CHAR_LEN_TABLE, + 'name': '"UTF-16LE"'} +UTF8_CLS = (1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 0, 0, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, + 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 7, 8, 8, 8, 8, 8, 8, + 8, 8, 8, 8, 8, 8, 9, 8, 8, 10, 11, 11, 11, 11, 11, 11, 11, 12, 13, 13, + 13, 14, 15, 0, 0) +UTF8_ST = ( + MachineState.ERROR, MachineState.START, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, 12, 
10, + 9, 11, 8, 7, 6, 5, 4, 3, + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, + MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, + MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, MachineState.ITS_ME, + MachineState.ERROR, MachineState.ERROR, 5, 5, 5, 5, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, 5, 5, 5, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, 7, 7, 7, 7, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, 7, 7, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, 9, 9, 9, 9, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, 
MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, 9, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, 12, 12, 12, 12, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, 12, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, 12, 12, 12, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.START, MachineState.START, MachineState.START, MachineState.START, MachineState.ERROR, MachineState.ERROR, + MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR, MachineState.ERROR) +UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) +UTF8_SM_MODEL = { + 'class_table': UTF8_CLS, + 'class_factor': 16, + 'state_table': UTF8_ST, + 'char_len_table': UTF8_CHAR_LEN_TABLE, + 'name': '"UTF-8"'} diff --git a/APPS_UNCOMPILED/lib/chardet/metadata/__init__.py b/APPS_UNCOMPILED/lib/chardet/metadata/__init__.py new file mode 100644 index 0000000..d179900 --- /dev/null +++ b/APPS_UNCOMPILED/lib/chardet/metadata/__init__.py @@ -0,0 +1,7 @@ +# uncompyle6 
version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/metadata/__init__.py +# Compiled at: 2024-04-18 03:12:56 +pass diff --git a/APPS_UNCOMPILED/lib/chardet/metadata/languages.py b/APPS_UNCOMPILED/lib/chardet/metadata/languages.py new file mode 100644 index 0000000..bf4b14b --- /dev/null +++ b/APPS_UNCOMPILED/lib/chardet/metadata/languages.py @@ -0,0 +1,272 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/metadata/languages.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 19474 bytes +""" +Metadata about languages used by our model training code for our +SingleByteCharSetProbers. Could be used for other things in the future. + +This code is based on the language metadata from the uchardet project. +""" +from __future__ import absolute_import, print_function +from string import ascii_letters + +class Language(object): + __doc__ = "Metadata about a language useful for training models\n\n :ivar name: The human name for the language, in English.\n :type name: str\n :ivar iso_code: 2-letter ISO 639-1 if possible, 3-letter ISO code otherwise,\n or use another catalog as a last resort.\n :type iso_code: str\n :ivar use_ascii: Whether or not ASCII letters should be included in trained\n models.\n :type use_ascii: bool\n :ivar charsets: The charsets we want to support and create data for.\n :type charsets: list of str\n :ivar alphabet: The characters in the language's alphabet. 
If `use_ascii` is\n `True`, you only need to add those not in the ASCII set.\n :type alphabet: str\n :ivar wiki_start_pages: The Wikipedia pages to start from if we're crawling\n Wikipedia for training data.\n :type wiki_start_pages: list of str\n " + + def __init__(self, name=None, iso_code=None, use_ascii=True, charsets=None, alphabet=None, wiki_start_pages=None): + super(Language, self).__init__() + self.name = name + self.iso_code = iso_code + self.use_ascii = use_ascii + self.charsets = charsets + if self.use_ascii: + if alphabet: + alphabet += ascii_letters + else: + alphabet = ascii_letters + else: + if not alphabet: + raise ValueError("Must supply alphabet if use_ascii is False") + self.alphabet = "".join(sorted(set(alphabet))) if alphabet else None + self.wiki_start_pages = wiki_start_pages + + def __repr__(self): + return "{}({})".format(self.__class__.__name__, ", ".join(("{}={!r}".format(k, v) for k, v in self.__dict__.items() if not k.startswith("_")))) + + +LANGUAGES = {'Arabic':Language(name="Arabic", iso_code="ar", + use_ascii=False, + charsets=[ + "ISO-8859-6", "WINDOWS-1256", + "CP720", "CP864"], + alphabet="ءآأؤإئابةتثجحخدذرزسشصضطظعغػؼؽؾؿـفقكلمنهوىيًٌٍَُِّ", + wiki_start_pages=[ + "الصفحة_الرئيسية"]), + 'Belarusian':Language(name="Belarusian", iso_code="be", + use_ascii=False, + charsets=[ + "ISO-8859-5", "WINDOWS-1251", + "IBM866", "MacCyrillic"], + alphabet="АБВГДЕЁЖЗІЙКЛМНОПРСТУЎФХЦЧШЫЬЭЮЯабвгдеёжзійклмнопрстуўфхцчшыьэюяʼ", + wiki_start_pages=[ + "Галоўная_старонка"]), + 'Bulgarian':Language(name="Bulgarian", iso_code="bg", + use_ascii=False, + charsets=[ + "ISO-8859-5", "WINDOWS-1251", + "IBM855"], + alphabet="АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЬЮЯабвгдежзийклмнопрстуфхцчшщъьюя", + wiki_start_pages=[ + "Начална_страница"]), + 'Czech':Language(name="Czech", iso_code="cz", + use_ascii=True, + charsets=[ + "ISO-8859-2", "WINDOWS-1250"], + alphabet="áčďéěíňóřšťúůýžÁČĎÉĚÍŇÓŘŠŤÚŮÝŽ", + wiki_start_pages=[ + "Hlavní_strana"]), + 
'Danish':Language(name="Danish", iso_code="da", + use_ascii=True, + charsets=[ + "ISO-8859-1", "ISO-8859-15", + "WINDOWS-1252"], + alphabet="æøåÆØÅ", + wiki_start_pages=[ + "Forside"]), + 'German':Language(name="German", iso_code="de", + use_ascii=True, + charsets=[ + "ISO-8859-1", "WINDOWS-1252"], + alphabet="äöüßÄÖÜ", + wiki_start_pages=[ + "Wikipedia:Hauptseite"]), + 'Greek':Language(name="Greek", iso_code="el", + use_ascii=False, + charsets=[ + "ISO-8859-7", "WINDOWS-1253"], + alphabet="αβγδεζηθικλμνξοπρσςτυφχψωάέήίόύώΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΣΤΥΦΧΨΩΆΈΉΊΌΎΏ", + wiki_start_pages=[ + "Πύλη:Κύρια"]), + 'English':Language(name="English", iso_code="en", + use_ascii=True, + charsets=[ + "ISO-8859-1", "WINDOWS-1252"], + wiki_start_pages=[ + "Main_Page"]), + 'Esperanto':Language(name="Esperanto", iso_code="eo", + use_ascii=False, + charsets=[ + "ISO-8859-3"], + alphabet="abcĉdefgĝhĥijĵklmnoprsŝtuŭvzABCĈDEFGĜHĤIJĴKLMNOPRSŜTUŬVZ", + wiki_start_pages=[ + "Vikipedio:Ĉefpaĝo"]), + 'Spanish':Language(name="Spanish", iso_code="es", + use_ascii=True, + charsets=[ + "ISO-8859-1", "ISO-8859-15", + "WINDOWS-1252"], + alphabet="ñáéíóúüÑÁÉÍÓÚÜ", + wiki_start_pages=[ + "Wikipedia:Portada"]), + 'Estonian':Language(name="Estonian", iso_code="et", + use_ascii=False, + charsets=[ + "ISO-8859-4", "ISO-8859-13", + "WINDOWS-1257"], + alphabet="ABDEGHIJKLMNOPRSTUVÕÄÖÜabdeghijklmnoprstuvõäöü", + wiki_start_pages=[ + "Esileht"]), + 'Finnish':Language(name="Finnish", iso_code="fi", + use_ascii=True, + charsets=[ + "ISO-8859-1", "ISO-8859-15", + "WINDOWS-1252"], + alphabet="ÅÄÖŠŽåäöšž", + wiki_start_pages=[ + "Wikipedia:Etusivu"]), + 'French':Language(name="French", iso_code="fr", + use_ascii=True, + charsets=[ + "ISO-8859-1", "ISO-8859-15", + "WINDOWS-1252"], + alphabet="œàâçèéîïùûêŒÀÂÇÈÉÎÏÙÛÊ", + wiki_start_pages=[ + "Wikipédia:Accueil_principal", + "Bœuf (animal)"]), + 'Hebrew':Language(name="Hebrew", iso_code="he", + use_ascii=False, + charsets=[ + "ISO-8859-8", "WINDOWS-1255"], + 
alphabet="אבגדהוזחטיךכלםמןנסעףפץצקרשתװױײ", + wiki_start_pages=[ + "עמוד_ראשי"]), + 'Croatian':Language(name="Croatian", iso_code="hr", + use_ascii=False, + charsets=[ + "ISO-8859-2", "WINDOWS-1250"], + alphabet="abcčćdđefghijklmnoprsštuvzžABCČĆDĐEFGHIJKLMNOPRSŠTUVZŽ", + wiki_start_pages=[ + "Glavna_stranica"]), + 'Hungarian':Language(name="Hungarian", iso_code="hu", + use_ascii=False, + charsets=[ + "ISO-8859-2", "WINDOWS-1250"], + alphabet="abcdefghijklmnoprstuvzáéíóöőúüűABCDEFGHIJKLMNOPRSTUVZÁÉÍÓÖŐÚÜŰ", + wiki_start_pages=[ + "Kezdőlap"]), + 'Italian':Language(name="Italian", iso_code="it", + use_ascii=True, + charsets=[ + "ISO-8859-1", "ISO-8859-15", + "WINDOWS-1252"], + alphabet="ÀÈÉÌÒÓÙàèéìòóù", + wiki_start_pages=[ + "Pagina_principale"]), + 'Lithuanian':Language(name="Lithuanian", iso_code="lt", + use_ascii=False, + charsets=[ + "ISO-8859-13", "WINDOWS-1257", + "ISO-8859-4"], + alphabet="AĄBCČDEĘĖFGHIĮYJKLMNOPRSŠTUŲŪVZŽaąbcčdeęėfghiįyjklmnoprsštuųūvzž", + wiki_start_pages=[ + "Pagrindinis_puslapis"]), + 'Latvian':Language(name="Latvian", iso_code="lv", + use_ascii=False, + charsets=[ + "ISO-8859-13", "WINDOWS-1257", + "ISO-8859-4"], + alphabet="AĀBCČDEĒFGĢHIĪJKĶLĻMNŅOPRSŠTUŪVZŽaābcčdeēfgģhiījkķlļmnņoprsštuūvzž", + wiki_start_pages=[ + "Sākumlapa"]), + 'Macedonian':Language(name="Macedonian", iso_code="mk", + use_ascii=False, + charsets=[ + "ISO-8859-5", "WINDOWS-1251", + "MacCyrillic", "IBM855"], + alphabet="АБВГДЃЕЖЗЅИЈКЛЉМНЊОПРСТЌУФХЦЧЏШабвгдѓежзѕијклљмнњопрстќуфхцчџш", + wiki_start_pages=[ + "Главна_страница"]), + 'Dutch':Language(name="Dutch", iso_code="nl", + use_ascii=True, + charsets=[ + "ISO-8859-1", "WINDOWS-1252"], + wiki_start_pages=[ + "Hoofdpagina"]), + 'Polish':Language(name="Polish", iso_code="pl", + use_ascii=False, + charsets=[ + "ISO-8859-2", "WINDOWS-1250"], + alphabet="AĄBCĆDEĘFGHIJKLŁMNŃOÓPRSŚTUWYZŹŻaąbcćdeęfghijklłmnńoóprsśtuwyzźż", + wiki_start_pages=[ + "Wikipedia:Strona_główna"]), + 'Portuguese':Language(name="Portuguese", 
iso_code="pt", + use_ascii=True, + charsets=[ + "ISO-8859-1", "ISO-8859-15", + "WINDOWS-1252"], + alphabet="ÁÂÃÀÇÉÊÍÓÔÕÚáâãàçéêíóôõú", + wiki_start_pages=[ + "Wikipédia:Página_principal"]), + 'Romanian':Language(name="Romanian", iso_code="ro", + use_ascii=True, + charsets=[ + "ISO-8859-2", "WINDOWS-1250"], + alphabet="ăâîșțĂÂÎȘȚ", + wiki_start_pages=[ + "Pagina_principală"]), + 'Russian':Language(name="Russian", iso_code="ru", + use_ascii=False, + charsets=[ + 'ISO-8859-5', 'WINDOWS-1251', + 'KOI8-R', 'MacCyrillic', 'IBM866', + 'IBM855'], + alphabet="абвгдеёжзийклмнопрстуфхцчшщъыьэюяАБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ", + wiki_start_pages=[ + "Заглавная_страница"]), + 'Slovak':Language(name="Slovak", iso_code="sk", + use_ascii=True, + charsets=[ + "ISO-8859-2", "WINDOWS-1250"], + alphabet="áäčďéíĺľňóôŕšťúýžÁÄČĎÉÍĹĽŇÓÔŔŠŤÚÝŽ", + wiki_start_pages=[ + "Hlavná_stránka"]), + 'Slovene':Language(name="Slovene", iso_code="sl", + use_ascii=False, + charsets=[ + "ISO-8859-2", "WINDOWS-1250"], + alphabet="abcčdefghijklmnoprsštuvzžABCČDEFGHIJKLMNOPRSŠTUVZŽ", + wiki_start_pages=[ + "Glavna_stran"]), + 'Serbian':Language(name="Serbian", iso_code="sr", + alphabet="АБВГДЂЕЖЗИЈКЛЉМНЊОПРСТЋУФХЦЧЏШабвгдђежзијклљмнњопрстћуфхцчџш", + charsets=[ + "ISO-8859-5", "WINDOWS-1251", + "MacCyrillic", "IBM855"], + wiki_start_pages=[ + "Главна_страна"]), + 'Thai':Language(name="Thai", iso_code="th", + use_ascii=False, + charsets=[ + "ISO-8859-11", "TIS-620", "CP874"], + alphabet="กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛", + wiki_start_pages=[ + "หน้าหลัก"]), + 'Turkish':Language(name="Turkish", iso_code="tr", + use_ascii=False, + charsets=[ + "ISO-8859-3", "ISO-8859-9", + "WINDOWS-1254"], + alphabet="abcçdefgğhıijklmnoöprsştuüvyzâîûABCÇDEFGĞHIİJKLMNOÖPRSŞTUÜVYZÂÎÛ", + wiki_start_pages=[ + "Ana_Sayfa"]), + 'Vietnamese':Language(name="Vietnamese", iso_code="vi", + use_ascii=False, + charsets=[ + "WINDOWS-1258"], + 
alphabet="aăâbcdđeêghiklmnoôơpqrstuưvxyAĂÂBCDĐEÊGHIKLMNOÔƠPQRSTUƯVXY", + wiki_start_pages=[ + "Chữ_Quốc_ngữ"])} diff --git a/APPS_UNCOMPILED/lib/chardet/sbcharsetprober.py b/APPS_UNCOMPILED/lib/chardet/sbcharsetprober.py new file mode 100644 index 0000000..c8f1959 --- /dev/null +++ b/APPS_UNCOMPILED/lib/chardet/sbcharsetprober.py @@ -0,0 +1,100 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/chardet/sbcharsetprober.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 6136 bytes +from collections import namedtuple +from .charsetprober import CharSetProber +from .enums import CharacterCategory, ProbingState, SequenceLikelihood +SingleByteCharSetModel = namedtuple("SingleByteCharSetModel", [ + 'charset_name', + 'language', + 'char_to_order_map', + 'language_model', + 'typical_positive_ratio', + 'keep_ascii_letters', + 'alphabet']) + +class SingleByteCharSetProber(CharSetProber): + SAMPLE_SIZE = 64 + SB_ENOUGH_REL_THRESHOLD = 1024 + POSITIVE_SHORTCUT_THRESHOLD = 0.95 + NEGATIVE_SHORTCUT_THRESHOLD = 0.05 + + def __init__(self, model, reversed=False, name_prober=None): + super(SingleByteCharSetProber, self).__init__() + self._model = model + self._reversed = reversed + self._name_prober = name_prober + self._last_order = None + self._seq_counters = None + self._total_seqs = None + self._total_char = None + self._freq_char = None + self.reset() + + def reset(self): + super(SingleByteCharSetProber, self).reset() + self._last_order = 255 + self._seq_counters = [0] * SequenceLikelihood.get_num_categories() + self._total_seqs = 0 + self._total_char = 0 + self._freq_char = 0 + + @property + def charset_name(self): + if self._name_prober: + return self._name_prober.charset_name + return self._model.charset_name + + @property + def language(self): + if self._name_prober: + 
return self._name_prober.language + return self._model.language + + def feed(self, byte_str): + if not self._model.keep_ascii_letters: + byte_str = self.filter_international_words(byte_str) + elif not byte_str: + return self.state + char_to_order_map = self._model.char_to_order_map + language_model = self._model.language_model + for char in byte_str: + order = char_to_order_map.get(char, CharacterCategory.UNDEFINED) + if order < CharacterCategory.CONTROL: + self._total_char += 1 + elif order < self.SAMPLE_SIZE: + self._freq_char += 1 + if self._last_order < self.SAMPLE_SIZE: + self._total_seqs += 1 + if not self._reversed: + lm_cat = language_model[self._last_order][order] + else: + lm_cat = language_model[order][self._last_order] + self._seq_counters[lm_cat] += 1 + self._last_order = order + + charset_name = self._model.charset_name + if self.state == ProbingState.DETECTING: + if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD: + confidence = self.get_confidence() + if confidence > self.POSITIVE_SHORTCUT_THRESHOLD: + self.logger.debug("%s confidence = %s, we have a winner", charset_name, confidence) + self._state = ProbingState.FOUND_IT + else: + if confidence < self.NEGATIVE_SHORTCUT_THRESHOLD: + self.logger.debug("%s confidence = %s, below negative shortcut threshhold %s", charset_name, confidence, self.NEGATIVE_SHORTCUT_THRESHOLD) + self._state = ProbingState.NOT_ME + return self.state + + def get_confidence(self): + r = 0.01 + if self._total_seqs > 0: + r = 1.0 * self._seq_counters[SequenceLikelihood.POSITIVE] / self._total_seqs / self._model.typical_positive_ratio + r = r * self._freq_char / self._total_char + if r >= 1.0: + r = 0.99 + return r diff --git a/APPS_UNCOMPILED/lib/chardet/sbcsgroupprober.py b/APPS_UNCOMPILED/lib/chardet/sbcsgroupprober.py new file mode 100644 index 0000000..f1cc994 --- /dev/null +++ b/APPS_UNCOMPILED/lib/chardet/sbcsgroupprober.py @@ -0,0 +1,42 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# 
# ===========================================================================
# chardet/sbcsgroupprober.py
# ===========================================================================
from .charsetgroupprober import CharSetGroupProber
from .hebrewprober import HebrewProber
from .langbulgarianmodel import ISO_8859_5_BULGARIAN_MODEL, WINDOWS_1251_BULGARIAN_MODEL
from .langgreekmodel import ISO_8859_7_GREEK_MODEL, WINDOWS_1253_GREEK_MODEL
from .langhebrewmodel import WINDOWS_1255_HEBREW_MODEL
from .langrussianmodel import (IBM855_RUSSIAN_MODEL, IBM866_RUSSIAN_MODEL,
                               ISO_8859_5_RUSSIAN_MODEL, KOI8_R_RUSSIAN_MODEL,
                               MACCYRILLIC_RUSSIAN_MODEL,
                               WINDOWS_1251_RUSSIAN_MODEL)
from .langthaimodel import TIS_620_THAI_MODEL
from .langturkishmodel import ISO_8859_9_TURKISH_MODEL
from .sbcharsetprober import SingleByteCharSetProber


class SBCSGroupProber(CharSetGroupProber):
    """Group prober bundling every single-byte charset prober."""

    def __init__(self):
        super(SBCSGroupProber, self).__init__()
        # Hebrew needs a coordinating prober that decides between logical
        # (windows-1255) and visual byte order; both children share it.
        hebrew_prober = HebrewProber()
        logical_hebrew_prober = SingleByteCharSetProber(
            WINDOWS_1255_HEBREW_MODEL, False, hebrew_prober)
        # The visual prober scores the same model with reversed byte order.
        visual_hebrew_prober = SingleByteCharSetProber(
            WINDOWS_1255_HEBREW_MODEL, True, hebrew_prober)
        hebrew_prober.set_model_probers(logical_hebrew_prober,
                                        visual_hebrew_prober)
        self.probers = [
            SingleByteCharSetProber(WINDOWS_1251_RUSSIAN_MODEL),
            SingleByteCharSetProber(KOI8_R_RUSSIAN_MODEL),
            SingleByteCharSetProber(ISO_8859_5_RUSSIAN_MODEL),
            SingleByteCharSetProber(MACCYRILLIC_RUSSIAN_MODEL),
            SingleByteCharSetProber(IBM866_RUSSIAN_MODEL),
            SingleByteCharSetProber(IBM855_RUSSIAN_MODEL),
            SingleByteCharSetProber(ISO_8859_7_GREEK_MODEL),
            SingleByteCharSetProber(WINDOWS_1253_GREEK_MODEL),
            SingleByteCharSetProber(ISO_8859_5_BULGARIAN_MODEL),
            SingleByteCharSetProber(WINDOWS_1251_BULGARIAN_MODEL),
            SingleByteCharSetProber(TIS_620_THAI_MODEL),
            SingleByteCharSetProber(ISO_8859_9_TURKISH_MODEL),
            hebrew_prober,
            logical_hebrew_prober,
            visual_hebrew_prober,
        ]
        self.reset()


# ===========================================================================
# chardet/sjisprober.py
# ===========================================================================
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import SJISDistributionAnalysis
from .jpcntx import SJISContextAnalysis
from .mbcssm import SJIS_SM_MODEL
from .enums import ProbingState, MachineState


class SJISProber(MultiByteCharSetProber):
    """Shift-JIS prober combining a coding state machine, a character
    distribution analyser and a Japanese context analyser."""

    def __init__(self):
        super(SJISProber, self).__init__()
        self.coding_sm = CodingStateMachine(SJIS_SM_MODEL)
        self.distribution_analyzer = SJISDistributionAnalysis()
        self.context_analyzer = SJISContextAnalysis()
        self.reset()

    def reset(self):
        super(SJISProber, self).reset()
        self.context_analyzer.reset()

    @property
    def charset_name(self):
        # The context analyser refines the name (e.g. SHIFT_JIS vs CP932).
        return self.context_analyzer.charset_name

    @property
    def language(self):
        return "Japanese"

    def feed(self, byte_str):
        """Run each byte through the Shift-JIS state machine and hand complete
        characters to the two analysers; return the current ProbingState."""
        for i in range(len(byte_str)):
            coding_state = self.coding_sm.next_state(byte_str[i])
            if coding_state == MachineState.ERROR:
                self.logger.debug("%s %s prober hit error at byte %s",
                                  self.charset_name, self.language, i)
                self._state = ProbingState.NOT_ME
                break
            elif coding_state == MachineState.ITS_ME:
                self._state = ProbingState.FOUND_IT
                break
            elif coding_state == MachineState.START:
                char_len = self.coding_sm.get_current_charlen()
                if i == 0:
                    # First byte of this chunk: the character may straddle the
                    # previous chunk, so take its tail from _last_char.
                    self._last_char[1] = byte_str[0]
                    # BUG FIX (decompiler artifact): the decompiled code read
                    # ``self._last_char[(2 - char_len)[:None]]`` — subscripting
                    # an int, a TypeError at runtime.  Restore the real slice.
                    self.context_analyzer.feed(self._last_char[2 - char_len:],
                                               char_len)
                    self.distribution_analyzer.feed(self._last_char, char_len)
                else:
                    # BUG FIX (decompiler artifact): likewise restore
                    # ``byte_str[i + 1 - char_len:i + 3 - char_len]`` and
                    # ``byte_str[i - 1:i + 1]`` from the broken int-subscripts.
                    self.context_analyzer.feed(
                        byte_str[i + 1 - char_len:i + 3 - char_len], char_len)
                    self.distribution_analyzer.feed(byte_str[i - 1:i + 1],
                                                    char_len)

        self._last_char[0] = byte_str[-1]
        if self.state == ProbingState.DETECTING:
            if (self.context_analyzer.got_enough_data()
                    and self.get_confidence() > self.SHORTCUT_THRESHOLD):
                self._state = ProbingState.FOUND_IT
        return self.state

    def get_confidence(self):
        """Best of the context and distribution analysers' confidences."""
        context_conf = self.context_analyzer.get_confidence()
        distrib_conf = self.distribution_analyzer.get_confidence()
        return max(context_conf, distrib_conf)
+ +:author: Mark Pilgrim (initial port to Python) +:author: Shy Shalom (original C code) +:author: Dan Blanchard (major refactoring for 3.0) +:author: Ian Cordasco +""" +import codecs, logging, re +from .charsetgroupprober import CharSetGroupProber +from .enums import InputState, LanguageFilter, ProbingState +from .escprober import EscCharSetProber +from .latin1prober import Latin1Prober +from .mbcsgroupprober import MBCSGroupProber +from .sbcsgroupprober import SBCSGroupProber + +class UniversalDetector(object): + __doc__ = "\n The ``UniversalDetector`` class underlies the ``chardet.detect`` function\n and coordinates all of the different charset probers.\n\n To get a ``dict`` containing an encoding and its confidence, you can simply\n run:\n\n .. code::\n\n u = UniversalDetector()\n u.feed(some_bytes)\n u.close()\n detected = u.result\n\n " + MINIMUM_THRESHOLD = 0.2 + HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xff]') + ESC_DETECTOR = re.compile(b'(\x1b|~{)') + WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9f]') + ISO_WIN_MAP = {'iso-8859-1': '"Windows-1252"', + 'iso-8859-2': '"Windows-1250"', + 'iso-8859-5': '"Windows-1251"', + 'iso-8859-6': '"Windows-1256"', + 'iso-8859-7': '"Windows-1253"', + 'iso-8859-8': '"Windows-1255"', + 'iso-8859-9': '"Windows-1254"', + 'iso-8859-13': '"Windows-1257"'} + + def __init__(self, lang_filter=LanguageFilter.ALL): + self._esc_charset_prober = None + self._charset_probers = [] + self.result = None + self.done = None + self._got_data = None + self._input_state = None + self._last_char = None + self.lang_filter = lang_filter + self.logger = logging.getLogger(__name__) + self._has_win_bytes = None + self.reset() + + def reset(self): + """ + Reset the UniversalDetector and all of its probers back to their + initial states. This is called by ``__init__``, so you only need to + call this directly in between analyses of different documents. 
+ """ + self.result = {'encoding':None, + 'confidence':0.0, 'language':None} + self.done = False + self._got_data = False + self._has_win_bytes = False + self._input_state = InputState.PURE_ASCII + self._last_char = b'' + if self._esc_charset_prober: + self._esc_charset_prober.reset() + for prober in self._charset_probers: + prober.reset() + + def feed(self, byte_str): + """ + Takes a chunk of a document and feeds it through all of the relevant + charset probers. + + After calling ``feed``, you can check the value of the ``done`` + attribute to see if you need to continue feeding the + ``UniversalDetector`` more data, or if it has made a prediction + (in the ``result`` attribute). + + .. note:: + You should always call ``close`` when you're done feeding in your + document if ``done`` is not already ``True``. + """ + if self.done: + return + if not len(byte_str): + return + if not isinstance(byte_str, bytearray): + byte_str = bytearray(byte_str) + if not self._got_data: + if byte_str.startswith(codecs.BOM_UTF8): + self.result = {'encoding':"UTF-8-SIG", 'confidence':1.0, + 'language':""} + else: + if byte_str.startswith((codecs.BOM_UTF32_LE, + codecs.BOM_UTF32_BE)): + self.result = {'encoding':"UTF-32", + 'confidence':1.0, + 'language':""} + else: + if byte_str.startswith(b'\xfe\xff\x00\x00'): + self.result = {'encoding':"X-ISO-10646-UCS-4-3412", 'confidence':1.0, + 'language':""} + else: + if byte_str.startswith(b'\x00\x00\xff\xfe'): + self.result = {'encoding':"X-ISO-10646-UCS-4-2143", 'confidence':1.0, + 'language':""} + else: + if byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)): + self.result = {'encoding':"UTF-16", + 'confidence':1.0, + 'language':""} + self._got_data = True + if self.result["encoding"] is not None: + self.done = True + return + if self._input_state == InputState.PURE_ASCII: + if self.HIGH_BYTE_DETECTOR.search(byte_str): + self._input_state = InputState.HIGH_BYTE + else: + if self._input_state == InputState.PURE_ASCII: + if 
self.ESC_DETECTOR.search(self._last_char + byte_str): + self._input_state = InputState.ESC_ASCII + self._last_char = byte_str[(-1)[:None]] + if self._input_state == InputState.ESC_ASCII: + if not self._esc_charset_prober: + self._esc_charset_prober = EscCharSetProber(self.lang_filter) + if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT: + self.result = {'encoding':(self._esc_charset_prober).charset_name, 'confidence':(self._esc_charset_prober.get_confidence)(), + 'language':(self._esc_charset_prober).language} + self.done = True + elif self._input_state == InputState.HIGH_BYTE: + if not self._charset_probers: + self._charset_probers = [ + MBCSGroupProber(self.lang_filter)] + if self.lang_filter & LanguageFilter.NON_CJK: + self._charset_probers.append(SBCSGroupProber()) + self._charset_probers.append(Latin1Prober()) + for prober in self._charset_probers: + if prober.feed(byte_str) == ProbingState.FOUND_IT: + self.result = {'encoding':prober.charset_name, + 'confidence':(prober.get_confidence)(), + 'language':prober.language} + self.done = True + break + + if self.WIN_BYTE_DETECTOR.search(byte_str): + self._has_win_bytes = True + + def close(self): + """ + Stop analyzing the current document and come up with a final + prediction. + + :returns: The ``result`` attribute, a ``dict`` with the keys + `encoding`, `confidence`, and `language`. 
+ """ + if self.done: + return self.result + self.done = True + self._got_data or self.logger.debug("no data received!") + else: + if self._input_state == InputState.PURE_ASCII: + self.result = {'encoding':"ascii", + 'confidence':1.0, + 'language':""} + else: + if self._input_state == InputState.HIGH_BYTE: + prober_confidence = None + max_prober_confidence = 0.0 + max_prober = None + for prober in self._charset_probers: + if not prober: + continue + prober_confidence = prober.get_confidence() + if prober_confidence > max_prober_confidence: + max_prober_confidence = prober_confidence + max_prober = prober + + if max_prober: + if max_prober_confidence > self.MINIMUM_THRESHOLD: + charset_name = max_prober.charset_name + lower_charset_name = max_prober.charset_name.lower() + confidence = max_prober.get_confidence() + if lower_charset_name.startswith("iso-8859"): + if self._has_win_bytes: + charset_name = self.ISO_WIN_MAP.get(lower_charset_name, charset_name) + self.result = {'encoding':charset_name, + 'confidence':confidence, + 'language':max_prober.language} + elif self.logger.getEffectiveLevel() <= logging.DEBUG and self.result["encoding"] is None: + self.logger.debug("no probers hit minimum threshold") + for group_prober in self._charset_probers: + if not group_prober: + continue + if isinstance(group_prober, CharSetGroupProber): + for prober in group_prober.probers: + self.logger.debug("%s %s confidence = %s", prober.charset_name, prober.language, prober.get_confidence()) + + else: + self.logger.debug("%s %s confidence = %s", group_prober.charset_name, group_prober.language, group_prober.get_confidence()) + + return self.result diff --git a/APPS_UNCOMPILED/lib/chardet/utf8prober.py b/APPS_UNCOMPILED/lib/chardet/utf8prober.py new file mode 100644 index 0000000..26b511f --- /dev/null +++ b/APPS_UNCOMPILED/lib/chardet/utf8prober.py @@ -0,0 +1,57 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 
# ===========================================================================
# chardet/utf8prober.py
# ===========================================================================
from .charsetprober import CharSetProber
from .enums import ProbingState, MachineState
from .codingstatemachine import CodingStateMachine
from .mbcssm import UTF8_SM_MODEL


class UTF8Prober(CharSetProber):
    """Detect UTF-8 by running every byte through the UTF-8 coding state
    machine and counting well-formed multi-byte sequences."""

    # Each observed multi-byte character multiplies the "not UTF-8"
    # likelihood by this factor.
    ONE_CHAR_PROB = 0.5

    def __init__(self):
        super(UTF8Prober, self).__init__()
        self.coding_sm = CodingStateMachine(UTF8_SM_MODEL)
        self._num_mb_chars = None
        self.reset()

    def reset(self):
        super(UTF8Prober, self).reset()
        self.coding_sm.reset()
        self._num_mb_chars = 0

    @property
    def charset_name(self):
        return "utf-8"

    @property
    def language(self):
        return ""

    def feed(self, byte_str):
        """Consume a chunk of bytes and return the updated ProbingState."""
        for raw_byte in byte_str:
            machine_state = self.coding_sm.next_state(raw_byte)
            if machine_state == MachineState.ERROR:
                # Illegal UTF-8 sequence: rule this charset out.
                self._state = ProbingState.NOT_ME
                break
            if machine_state == MachineState.ITS_ME:
                self._state = ProbingState.FOUND_IT
                break
            if machine_state == MachineState.START:
                # A complete character was accepted; count it if multi-byte.
                if self.coding_sm.get_current_charlen() >= 2:
                    self._num_mb_chars += 1

        if self.state == ProbingState.DETECTING:
            if self.get_confidence() > self.SHORTCUT_THRESHOLD:
                self._state = ProbingState.FOUND_IT
        return self.state

    def get_confidence(self):
        """Confidence grows with each valid multi-byte char, capped at 0.99."""
        unlike = 0.99
        if self._num_mb_chars < 6:
            return 1.0 - unlike * self.ONE_CHAR_PROB ** self._num_mb_chars
        return unlike
+# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 242 bytes +""" +This module exists only to simplify retrieving the version number of chardet +from within setup.py and from chardet subpackages. + +:author: Dan Blanchard (dan.blanchard@gmail.com) +""" +__version__ = "4.0.0" +VERSION = __version__.split(".") diff --git a/APPS_UNCOMPILED/lib/deprecation.py b/APPS_UNCOMPILED/lib/deprecation.py new file mode 100644 index 0000000..609d23a --- /dev/null +++ b/APPS_UNCOMPILED/lib/deprecation.py @@ -0,0 +1,176 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/deprecation.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 12895 bytes +import collections, functools, textwrap, warnings +from packaging import version +from datetime import date +__version__ = "2.1.0" +__all__ = [ + 'deprecated', 'message_location', 'fail_if_not_removed', + 'DeprecatedWarning', + 'UnsupportedWarning'] +message_location = "bottom" + +class DeprecatedWarning(DeprecationWarning): + __doc__ = "A warning class for deprecated methods\n\n This is a specialization of the built-in :class:`DeprecationWarning`,\n adding parameters that allow us to get information into the __str__\n that ends up being sent through the :mod:`warnings` system.\n The attributes aren't able to be retrieved after the warning gets\n raised and passed through the system as only the class--not the\n instance--and message are what gets preserved.\n\n :param function: The function being deprecated.\n :param deprecated_in: The version that ``function`` is deprecated in\n :param removed_in: The version or :class:`datetime.date` specifying\n when ``function`` gets removed.\n :param details: Optional details about the deprecation. 
Most often\n this will include directions on what to use instead\n of the now deprecated code.\n " + + def __init__(self, function, deprecated_in, removed_in, details=''): + self.function = function + self.deprecated_in = deprecated_in + self.removed_in = removed_in + self.details = details + super(DeprecatedWarning, self).__init__(function, deprecated_in, removed_in, details) + + def __str__(self): + parts = collections.defaultdict(str) + parts["function"] = self.function + if self.deprecated_in: + parts["deprecated"] = " as of %s" % self.deprecated_in + if self.removed_in: + parts["removed"] = " and will be removed {} {}".format("on" if isinstance(self.removed_in, date) else "in", self.removed_in) + if any([self.deprecated_in, self.removed_in, self.details]): + parts["period"] = "." + if self.details: + parts["details"] = " %s" % self.details + return "%(function)s is deprecated%(deprecated)s%(removed)s%(period)s%(details)s" % parts + + +class UnsupportedWarning(DeprecatedWarning): + __doc__ = "A warning class for methods to be removed\n\n This is a subclass of :class:`~deprecation.DeprecatedWarning` and is used\n to output a proper message about a function being unsupported.\n Additionally, the :func:`~deprecation.fail_if_not_removed` decorator\n will handle this warning and cause any tests to fail if the system\n under test uses code that raises this warning.\n " + + def __str__(self): + parts = collections.defaultdict(str) + parts["function"] = self.function + parts["removed"] = self.removed_in + if self.details: + parts["details"] = " %s" % self.details + return "%(function)s is unsupported as of %(removed)s.%(details)s" % parts + + +def deprecated(deprecated_in=None, removed_in=None, current_version=None, details=''): + """Decorate a function to signify its deprecation + + This function wraps a method that will soon be removed and does two things: + * The docstring of the method will be modified to include a notice + about deprecation, e.g., "Deprecated 
since 0.9.11. Use foo instead." + * Raises a :class:`~deprecation.DeprecatedWarning` + via the :mod:`warnings` module, which is a subclass of the built-in + :class:`DeprecationWarning`. Note that built-in + :class:`DeprecationWarning`s are ignored by default, so for users + to be informed of said warnings they will need to enable them--see + the :mod:`warnings` module documentation for more details. + + :param deprecated_in: The version at which the decorated method is + considered deprecated. This will usually be the + next version to be released when the decorator is + added. The default is **None**, which effectively + means immediate deprecation. If this is not + specified, then the `removed_in` and + `current_version` arguments are ignored. + :param removed_in: The version or :class:`datetime.date` when the decorated + method will be removed. The default is **None**, + specifying that the function is not currently planned + to be removed. + Note: This parameter cannot be set to a value if + `deprecated_in=None`. + :param current_version: The source of version information for the + currently running code. This will usually be + a `__version__` attribute on your library. + The default is `None`. + When `current_version=None` the automation to + determine if the wrapped function is actually + in a period of deprecation or time for removal + does not work, causing a + :class:`~deprecation.DeprecatedWarning` + to be raised in all cases. + :param details: Extra details to be added to the method docstring and + warning. For example, the details may point users to + a replacement method, such as "Use the foo_bar + method instead". By default there are no details. 
+ """ + if deprecated_in is None: + if removed_in is not None: + raise TypeError("Cannot set removed_in to a value without also setting deprecated_in") + else: + is_deprecated = False + is_unsupported = False + if isinstance(removed_in, date): + if date.today() >= removed_in: + is_unsupported = True + else: + is_deprecated = True + else: + if current_version: + current_version = version.parse(current_version) + if removed_in and current_version >= version.parse(removed_in): + is_unsupported = True + elif deprecated_in: + if current_version >= version.parse(deprecated_in): + is_deprecated = True + else: + is_deprecated = True + should_warn = any([is_deprecated, is_unsupported]) + + def _function_wrapper(function): + if should_warn: + existing_docstring = function.__doc__ or "" + parts = {'deprecated_in':" %s" % deprecated_in if deprecated_in else "", + 'removed_in':"\n This will be removed {} {}.".format("on" if isinstance(removed_in, date) else "in", removed_in) if removed_in else "", + 'details':" %s" % details if details else ""} + deprecation_note = (".. 
deprecated::{deprecated_in}{removed_in}{details}".format)(**parts) + loc = 1 + string_list = existing_docstring.split("\n", 1) + if len(string_list) > 1: + string_list[1] = textwrap.dedent(string_list[1]) + string_list.insert(loc, "\n") + if message_location != "top": + loc = 3 + string_list.insert(loc, deprecation_note) + string_list.insert(loc, "\n\n") + function.__doc__ = "".join(string_list) + + @functools.wraps(function) + def _inner(*args, **kwargs): + if should_warn: + if is_unsupported: + cls = UnsupportedWarning + else: + cls = DeprecatedWarning + the_warning = cls(function.__name__, deprecated_in, removed_in, details) + warnings.warn(the_warning, category=DeprecationWarning, stacklevel=2) + return function(*args, **kwargs) + + return _inner + + return _function_wrapper + + +def fail_if_not_removed(method): + """Decorate a test method to track removal of deprecated code + + This decorator catches :class:`~deprecation.UnsupportedWarning` + warnings that occur during testing and causes unittests to fail, + making it easier to keep track of when code should be removed. + + :raises: :class:`AssertionError` if an + :class:`~deprecation.UnsupportedWarning` + is raised while running the test method. 
+ """ + + @functools.wraps(method) + def test_inner(*args, **kwargs): + with warnings.catch_warnings(record=True) as caught_warnings: + warnings.simplefilter("always") + rv = method(*args, **kwargs) + for warning in caught_warnings: + if warning.category == UnsupportedWarning: + raise AssertionError("%s uses a function that should be removed: %s" % ( + method, str(warning.message))) + + return rv + + return test_inner diff --git a/APPS_UNCOMPILED/lib/h2/__init__.py b/APPS_UNCOMPILED/lib/h2/__init__.py new file mode 100644 index 0000000..4489df0 --- /dev/null +++ b/APPS_UNCOMPILED/lib/h2/__init__.py @@ -0,0 +1,14 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/h2/__init__.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 86 bytes +""" +h2 +~~ + +A HTTP/2 implementation. +""" +__version__ = "2.6.2" diff --git a/APPS_UNCOMPILED/lib/h2/config.py b/APPS_UNCOMPILED/lib/h2/config.py new file mode 100644 index 0000000..de19b0e --- /dev/null +++ b/APPS_UNCOMPILED/lib/h2/config.py @@ -0,0 +1,80 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/h2/config.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 5764 bytes +""" +h2/config +~~~~~~~~~ + +Objects for controlling the configuration of the HTTP/2 stack. +""" + +class _BooleanConfigOption(object): + __doc__ = "\n Descriptor for handling a boolean config option. 
This will block\n attempts to set boolean config options to non-bools.\n " + + def __init__(self, name): + self.name = name + self.attr_name = "_%s" % self.name + + def __get__(self, instance, owner): + return getattr(instance, self.attr_name) + + def __set__(self, instance, value): + if not isinstance(value, bool): + raise ValueError("%s must be a bool" % self.name) + setattr(instance, self.attr_name, value) + + +class DummyLogger(object): + __doc__ = "\n An Logger object that does not actual logging, hence a DummyLogger.\n\n For the class the log operation is merely a no-op. The intent is to avoid\n conditionals being sprinkled throughout the hyper-h2 code for calls to\n logging functions when no logger is passed into the corresponding object.\n " + + def __init__(self, *vargs): + pass + + def debug(self, *vargs, **kwargs): + """ + No-op logging. Only level needed for now. + """ + pass + + +class H2Configuration(object): + __doc__ = "\n An object that controls the way a single HTTP/2 connection behaves.\n\n This object allows the users to customize behaviour. In particular, it\n allows users to enable or disable optional features, or to otherwise handle\n various unusual behaviours.\n\n This object has very little behaviour of its own: it mostly just ensures\n that configuration is self-consistent.\n\n :param client_side: Whether this object is to be used on the client side of\n a connection, or on the server side. Affects the logic used by the\n state machine, the default settings values, the allowable stream IDs,\n and several other properties. Defaults to ``True``.\n :type client_side: ``bool``\n\n :param header_encoding: Controls whether the headers emitted by this object\n in events are transparently decoded to ``unicode`` strings, and what\n encoding is used to do that decoding. For historical reasons, this\n defaults to ``'utf-8'``. 
To prevent the decoding of headers (that is,\n to force them to be returned as bytestrings), this can be set to\n ``False`` or the empty string.\n :type header_encoding: ``str``, ``False``, or ``None``\n\n :param validate_outbound_headers: Controls whether the headers emitted\n by this object are validated against the rules in RFC 7540.\n Disabling this setting will cause outbound header validation to\n be skipped, and allow the object to emit headers that may be illegal\n according to RFC 7540. Defaults to ``True``.\n :type validate_outbound_headers: ``bool``\n\n :param normalize_outbound_headers: Controls whether the headers emitted\n by this object are normalized before sending. Disabling this setting\n will cause outbound header normalization to be skipped, and allow\n the object to emit headers that may be illegal according to\n RFC 7540. Defaults to ``True``.\n :type normalize_outbound_headers: ``bool``\n\n :param validate_inbound_headers: Controls whether the headers received\n by this object are validated against the rules in RFC 7540.\n Disabling this setting will cause inbound header validation to\n be skipped, and allow the object to receive headers that may be illegal\n according to RFC 7540. Defaults to ``True``.\n :type validate_inbound_headers: ``bool``\n\n :param logger: A logger that conforms to the requirements for this module,\n those being no I/O and no context switches, which is needed in order\n to run in asynchronous operation.\n\n .. 
versionadded:: 2.6.0\n\n :type logger: ``logging.Logger``\n " + client_side = _BooleanConfigOption("client_side") + validate_outbound_headers = _BooleanConfigOption("validate_outbound_headers") + normalize_outbound_headers = _BooleanConfigOption("normalize_outbound_headers") + validate_inbound_headers = _BooleanConfigOption("validate_inbound_headers") + + def __init__(self, client_side=True, header_encoding='utf-8', validate_outbound_headers=True, normalize_outbound_headers=True, validate_inbound_headers=True, logger=None): + self.client_side = client_side + self.header_encoding = header_encoding + self.validate_outbound_headers = validate_outbound_headers + self.normalize_outbound_headers = normalize_outbound_headers + self.validate_inbound_headers = validate_inbound_headers + self.logger = logger or DummyLogger(__name__) + + @property + def header_encoding(self): + """ + Controls whether the headers emitted by this object in events are + transparently decoded to ``unicode`` strings, and what encoding is used + to do that decoding. For historical reasons, this defaults to + ``'utf-8'``. To prevent the decoding of headers (that is, to force them + to be returned as bytestrings), this can be set to ``False`` or the + empty string. + """ + return self._header_encoding + + @header_encoding.setter + def header_encoding(self, value): + """ + Enforces constraints on the value of header encoding. 
+ """ + if not isinstance(value, (bool, str, type(None))): + raise ValueError("header_encoding must be bool, string, or None") + if value is True: + raise ValueError("header_encoding cannot be True") + self._header_encoding = value diff --git a/APPS_UNCOMPILED/lib/h2/connection.py b/APPS_UNCOMPILED/lib/h2/connection.py new file mode 100644 index 0000000..c4b591b --- /dev/null +++ b/APPS_UNCOMPILED/lib/h2/connection.py @@ -0,0 +1,1688 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/h2/connection.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 84056 bytes +""" +h2/connection +~~~~~~~~~~~~~ + +An implementation of a HTTP/2 connection. +""" +import base64 +from enum import Enum, IntEnum +from hyperframe.exceptions import InvalidPaddingError +from hyperframe.frame import GoAwayFrame, WindowUpdateFrame, HeadersFrame, DataFrame, PingFrame, PushPromiseFrame, SettingsFrame, RstStreamFrame, PriorityFrame, ContinuationFrame, AltSvcFrame +from hpack.hpack import Encoder, Decoder +from hpack.exceptions import HPACKError +from .config import H2Configuration +from .errors import ErrorCodes, _error_code_from_int +from .events import WindowUpdated, RemoteSettingsChanged, PingAcknowledged, SettingsAcknowledged, ConnectionTerminated, PriorityUpdated, AlternativeServiceAvailable +from .exceptions import ProtocolError, NoSuchStreamError, FlowControlError, FrameTooLargeError, TooManyStreamsError, StreamClosedError, StreamIDTooLowError, NoAvailableStreamIDError, RFC1122Error, DenialOfServiceError +from .frame_buffer import FrameBuffer +from .settings import Settings, SettingCodes +from .stream import H2Stream, StreamClosedBy +from .utilities import guard_increment_window +from .windows import WindowManager +try: + from hpack.exceptions import OversizedHeaderListError +except 
ImportError: + + class OversizedHeaderListError(Exception): + pass + + +try: + from hyperframe.frame import ExtensionFrame +except ImportError: + + class ExtensionFrame(object): + pass + + +class ConnectionState(Enum): + IDLE = 0 + CLIENT_OPEN = 1 + SERVER_OPEN = 2 + CLOSED = 3 + + +class ConnectionInputs(Enum): + SEND_HEADERS = 0 + SEND_PUSH_PROMISE = 1 + SEND_DATA = 2 + SEND_GOAWAY = 3 + SEND_WINDOW_UPDATE = 4 + SEND_PING = 5 + SEND_SETTINGS = 6 + SEND_RST_STREAM = 7 + SEND_PRIORITY = 8 + RECV_HEADERS = 9 + RECV_PUSH_PROMISE = 10 + RECV_DATA = 11 + RECV_GOAWAY = 12 + RECV_WINDOW_UPDATE = 13 + RECV_PING = 14 + RECV_SETTINGS = 15 + RECV_RST_STREAM = 16 + RECV_PRIORITY = 17 + SEND_ALTERNATIVE_SERVICE = 18 + RECV_ALTERNATIVE_SERVICE = 19 + + +class AllowedStreamIDs(IntEnum): + EVEN = 0 + ODD = 1 + + +class H2ConnectionStateMachine(object): + __doc__ = "\n A single HTTP/2 connection state machine.\n\n This state machine, while defined in its own class, is logically part of\n the H2Connection class also defined in this file. 
The state machine itself\n maintains very little state directly, instead focusing entirely on managing\n state transitions.\n " + _transitions = {( + ConnectionState.IDLE, ConnectionInputs.SEND_HEADERS): ( + None, ConnectionState.CLIENT_OPEN), + + ( + ConnectionState.IDLE, ConnectionInputs.RECV_HEADERS): ( + None, ConnectionState.SERVER_OPEN), + + ( + ConnectionState.IDLE, ConnectionInputs.SEND_SETTINGS): ( + None, ConnectionState.IDLE), + + ( + ConnectionState.IDLE, ConnectionInputs.RECV_SETTINGS): ( + None, ConnectionState.IDLE), + + ( + ConnectionState.IDLE, ConnectionInputs.SEND_WINDOW_UPDATE): ( + None, ConnectionState.IDLE), + + ( + ConnectionState.IDLE, ConnectionInputs.RECV_WINDOW_UPDATE): ( + None, ConnectionState.IDLE), + + ( + ConnectionState.IDLE, ConnectionInputs.SEND_PING): ( + None, ConnectionState.IDLE), + + ( + ConnectionState.IDLE, ConnectionInputs.RECV_PING): ( + None, ConnectionState.IDLE), + + ( + ConnectionState.IDLE, ConnectionInputs.SEND_GOAWAY): ( + None, ConnectionState.CLOSED), + + ( + ConnectionState.IDLE, ConnectionInputs.RECV_GOAWAY): ( + None, ConnectionState.CLOSED), + + ( + ConnectionState.IDLE, ConnectionInputs.SEND_PRIORITY): ( + None, ConnectionState.IDLE), + + ( + ConnectionState.IDLE, ConnectionInputs.RECV_PRIORITY): ( + None, ConnectionState.IDLE), + + ( + ConnectionState.IDLE, ConnectionInputs.SEND_ALTERNATIVE_SERVICE): ( + None, ConnectionState.SERVER_OPEN), + + ( + ConnectionState.IDLE, ConnectionInputs.RECV_ALTERNATIVE_SERVICE): ( + None, ConnectionState.CLIENT_OPEN), + + ( + ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_HEADERS): ( + None, ConnectionState.CLIENT_OPEN), + + ( + ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_DATA): ( + None, ConnectionState.CLIENT_OPEN), + + ( + ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_GOAWAY): ( + None, ConnectionState.CLOSED), + + ( + ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_WINDOW_UPDATE): ( + None, ConnectionState.CLIENT_OPEN), + + ( + 
ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_PING): ( + None, ConnectionState.CLIENT_OPEN), + + ( + ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_SETTINGS): ( + None, ConnectionState.CLIENT_OPEN), + + ( + ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_PRIORITY): ( + None, ConnectionState.CLIENT_OPEN), + + ( + ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_HEADERS): ( + None, ConnectionState.CLIENT_OPEN), + + ( + ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_PUSH_PROMISE): ( + None, ConnectionState.CLIENT_OPEN), + + ( + ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_DATA): ( + None, ConnectionState.CLIENT_OPEN), + + ( + ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_GOAWAY): ( + None, ConnectionState.CLOSED), + + ( + ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_WINDOW_UPDATE): ( + None, ConnectionState.CLIENT_OPEN), + + ( + ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_PING): ( + None, ConnectionState.CLIENT_OPEN), + + ( + ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_SETTINGS): ( + None, ConnectionState.CLIENT_OPEN), + + ( + ConnectionState.CLIENT_OPEN, ConnectionInputs.SEND_RST_STREAM): ( + None, ConnectionState.CLIENT_OPEN), + + ( + ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_RST_STREAM): ( + None, ConnectionState.CLIENT_OPEN), + + ( + ConnectionState.CLIENT_OPEN, ConnectionInputs.RECV_PRIORITY): ( + None, ConnectionState.CLIENT_OPEN), + + ( + ConnectionState.CLIENT_OPEN, + ConnectionInputs.RECV_ALTERNATIVE_SERVICE): ( + None, ConnectionState.CLIENT_OPEN), + + ( + ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_HEADERS): ( + None, ConnectionState.SERVER_OPEN), + + ( + ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_PUSH_PROMISE): ( + None, ConnectionState.SERVER_OPEN), + + ( + ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_DATA): ( + None, ConnectionState.SERVER_OPEN), + + ( + ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_GOAWAY): ( + None, ConnectionState.CLOSED), + + ( + 
ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_WINDOW_UPDATE): ( + None, ConnectionState.SERVER_OPEN), + + ( + ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_PING): ( + None, ConnectionState.SERVER_OPEN), + + ( + ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_SETTINGS): ( + None, ConnectionState.SERVER_OPEN), + + ( + ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_PRIORITY): ( + None, ConnectionState.SERVER_OPEN), + + ( + ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_HEADERS): ( + None, ConnectionState.SERVER_OPEN), + + ( + ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_DATA): ( + None, ConnectionState.SERVER_OPEN), + + ( + ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_GOAWAY): ( + None, ConnectionState.CLOSED), + + ( + ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_WINDOW_UPDATE): ( + None, ConnectionState.SERVER_OPEN), + + ( + ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_PING): ( + None, ConnectionState.SERVER_OPEN), + + ( + ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_SETTINGS): ( + None, ConnectionState.SERVER_OPEN), + + ( + ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_PRIORITY): ( + None, ConnectionState.SERVER_OPEN), + + ( + ConnectionState.SERVER_OPEN, ConnectionInputs.SEND_RST_STREAM): ( + None, ConnectionState.SERVER_OPEN), + + ( + ConnectionState.SERVER_OPEN, ConnectionInputs.RECV_RST_STREAM): ( + None, ConnectionState.SERVER_OPEN), + + ( + ConnectionState.SERVER_OPEN, + ConnectionInputs.SEND_ALTERNATIVE_SERVICE): ( + None, ConnectionState.SERVER_OPEN), + + ( + ConnectionState.SERVER_OPEN, + ConnectionInputs.RECV_ALTERNATIVE_SERVICE): ( + None, ConnectionState.SERVER_OPEN), + + ( + ConnectionState.CLOSED, ConnectionInputs.SEND_GOAWAY): ( + None, ConnectionState.CLOSED), + + ( + ConnectionState.CLOSED, ConnectionInputs.RECV_GOAWAY): ( + None, ConnectionState.CLOSED)} + + def __init__(self): + self.state = ConnectionState.IDLE + + def process_input(self, input_): + """ + Process a specific input in the 
state machine. + """ + if not isinstance(input_, ConnectionInputs): + raise ValueError("Input must be an instance of ConnectionInputs") + else: + try: + func, target_state = self._transitions[(self.state, input_)] + except KeyError: + old_state = self.state + self.state = ConnectionState.CLOSED + raise ProtocolError("Invalid input %s in state %s" % (input_, old_state)) + else: + self.state = target_state + if func is not None: + return func() + return [] + + +class H2Connection(object): + __doc__ = "\n A low-level HTTP/2 connection object. This handles building and receiving\n frames and maintains both connection and per-stream state for all streams\n on this connection.\n\n This wraps a HTTP/2 Connection state machine implementation, ensuring that\n frames can only be sent/received when the connection is in a valid state.\n It also builds stream state machines on demand to ensure that the\n constraints of those state machines are met as well. Attempts to create\n frames that cannot be sent will raise a ``ProtocolError``.\n\n .. versionchanged:: 2.3.0\n Added the ``header_encoding`` keyword argument.\n\n .. versionchanged:: 2.5.0\n Added the ``config`` keyword argument. Deprecated the ``client_side``\n and ``header_encoding`` parameters.\n\n :param client_side: Whether this object is to be used on the client side of\n a connection, or on the server side. Affects the logic used by the\n state machine, the default settings values, the allowable stream IDs,\n and several other properties. Defaults to ``True``.\n\n .. deprecated:: 2.5.0\n\n :type client_side: ``bool``\n\n :param header_encoding: Controls whether the headers emitted by this object\n in events are transparently decoded to ``unicode`` strings, and what\n encoding is used to do that decoding. For historical reason, this\n defaults to ``'utf-8'``. To prevent the decoding of headers (that is,\n to force them to be returned as bytestrings), this can be set to\n ``False`` or the empty string.\n\n .. 
deprecated:: 2.5.0\n\n :type header_encoding: ``str`` or ``False``\n\n :param config: The configuration for the HTTP/2 connection. If provided,\n supersedes the deprecated ``client_side`` and ``header_encoding``\n values.\n\n .. versionadded:: 2.5.0\n\n :type config: :class:`H2Configuration `\n " + DEFAULT_MAX_OUTBOUND_FRAME_SIZE = 65535 + DEFAULT_MAX_INBOUND_FRAME_SIZE = 16777216 + HIGHEST_ALLOWED_STREAM_ID = 2147483647 + MAX_WINDOW_INCREMENT = 2147483647 + DEFAULT_MAX_HEADER_LIST_SIZE = 65536 + + def __init__(self, client_side=True, header_encoding='utf-8', config=None): + self.state_machine = H2ConnectionStateMachine() + self.streams = {} + self.highest_inbound_stream_id = 0 + self.highest_outbound_stream_id = 0 + self.encoder = Encoder() + self.decoder = Decoder() + self.decoder.max_header_list_size = self.DEFAULT_MAX_HEADER_LIST_SIZE + self.config = config + if self.config is None: + self.config = H2Configuration(client_side=client_side, + header_encoding=header_encoding) + self.local_settings = Settings(client=(self.config.client_side), + initial_values={(SettingCodes.MAX_CONCURRENT_STREAMS): 100, + (SettingCodes.MAX_HEADER_LIST_SIZE): (self.DEFAULT_MAX_HEADER_LIST_SIZE)}) + self.remote_settings = Settings(client=(not self.config.client_side)) + self.outbound_flow_control_window = self.remote_settings.initial_window_size + self.max_outbound_frame_size = self.remote_settings.max_frame_size + self.max_inbound_frame_size = self.local_settings.max_frame_size + self.incoming_buffer = FrameBuffer(server=(not self.config.client_side)) + self._header_frames = [] + self._data_to_send = b'' + self._closed_streams = {} + self._inbound_flow_control_window_manager = WindowManager(max_window_size=(self.local_settings.initial_window_size)) + self._frame_dispatch_table = {HeadersFrame: (self._receive_headers_frame), + PushPromiseFrame: (self._receive_push_promise_frame), + SettingsFrame: (self._receive_settings_frame), + DataFrame: (self._receive_data_frame), + 
WindowUpdateFrame: (self._receive_window_update_frame), + PingFrame: (self._receive_ping_frame), + RstStreamFrame: (self._receive_rst_stream_frame), + PriorityFrame: (self._receive_priority_frame), + GoAwayFrame: (self._receive_goaway_frame), + ContinuationFrame: (self._receive_naked_continuation), + AltSvcFrame: (self._receive_alt_svc_frame), + ExtensionFrame: (self._receive_unknown_frame)} + + def _prepare_for_sending(self, frames): + if not frames: + return + self._data_to_send += (b'').join((f.serialize() for f in frames)) + assert all((f.body_len <= self.max_outbound_frame_size for f in frames)) + + def _open_streams(self, remainder): + """ + A common method of counting number of open streams. Returns the number + of streams that are open *and* that have (stream ID % 2) == remainder. + While it iterates, also deletes any closed streams. + """ + count = 0 + to_delete = [] + for stream_id, stream in self.streams.items(): + if stream.open: + if stream_id % 2 == remainder: + count += 1 + if stream.closed: + to_delete.append(stream_id) + + for stream_id in to_delete: + stream = self.streams.pop(stream_id) + self._closed_streams[stream_id] = stream.closed_by + + return count + + @property + def open_outbound_streams(self): + """ + The current number of open outbound streams. + """ + outbound_numbers = int(self.config.client_side) + return self._open_streams(outbound_numbers) + + @property + def open_inbound_streams(self): + """ + The current number of open inbound streams. + """ + inbound_numbers = int(not self.config.client_side) + return self._open_streams(inbound_numbers) + + @property + def header_encoding(self): + """ + Controls whether the headers emitted by this object in events are + transparently decoded to ``unicode`` strings, and what encoding is used + to do that decoding. For historical reason, this defaults to + ``'utf-8'``. 
To prevent the decoding of headers (that is, to force them + to be returned as bytestrings), this can be set to ``False`` or the + empty string. + + .. versionadded:: 2.3.0 + + .. deprecated:: 2.5.0 + Use :data:`config ` instead. + """ + return self.config.header_encoding + + @header_encoding.setter + def header_encoding(self, value): + """ + Setter for header encoding config value. + """ + self.config.header_encoding = value + + @property + def client_side(self): + """ + Whether this object is to be used on the client side of a connection, + or on the server side. Affects the logic used by the state machine, the + default settings values, the allowable stream IDs, and several other + properties. Defaults to ``True``. + + .. deprecated:: 2.5.0 + Use :data:`config ` instead. + """ + return self.config.client_side + + @property + def inbound_flow_control_window(self): + """ + The size of the inbound flow control window for the connection. This is + rarely publicly useful: instead, use :meth:`remote_flow_control_window + `. This + shortcut is largely present to provide a shortcut to this data. + """ + return self._inbound_flow_control_window_manager.current_window_size + + def _begin_new_stream(self, stream_id, allowed_ids): + """ + Initiate a new stream. + + .. versionchanged:: 2.0.0 + Removed this function from the public API. + + :param stream_id: The ID of the stream to open. + :param allowed_ids: What kind of stream ID is allowed. 
+ """ + self.config.logger.debug("Attempting to initiate stream ID %d", stream_id) + outbound = self._stream_id_is_outbound(stream_id) + highest_stream_id = self.highest_outbound_stream_id if outbound else self.highest_inbound_stream_id + if stream_id <= highest_stream_id: + raise StreamIDTooLowError(stream_id, highest_stream_id) + elif stream_id % 2 != int(allowed_ids): + raise ProtocolError("Invalid stream ID for peer.") + s = H2Stream(stream_id, + config=(self.config), + inbound_window_size=(self.local_settings.initial_window_size), + outbound_window_size=(self.remote_settings.initial_window_size)) + self.config.logger.debug("Stream ID %d created", stream_id) + s.max_inbound_frame_size = self.max_inbound_frame_size + s.max_outbound_frame_size = self.max_outbound_frame_size + self.streams[stream_id] = s + self.config.logger.debug("Current streams: %s", self.streams.keys()) + if outbound: + self.highest_outbound_stream_id = stream_id + else: + self.highest_inbound_stream_id = stream_id + return s + + def initiate_connection(self): + """ + Provides any data that needs to be sent at the start of the connection. + Must be called for both clients and servers. + """ + self.config.logger.debug("Initializing connection") + self.state_machine.process_input(ConnectionInputs.SEND_SETTINGS) + if self.config.client_side: + preamble = b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n' + else: + preamble = b'' + f = SettingsFrame(0) + for setting, value in self.local_settings.items(): + f.settings[setting] = value + + self.config.logger.debug("Send Settings frame: %s", self.local_settings) + self._data_to_send += preamble + f.serialize() + + def initiate_upgrade_connection(self, settings_header=None): + """ + Call to initialise the connection object for use with an upgraded + HTTP/2 connection (i.e. a connection negotiated using the + ``Upgrade: h2c`` HTTP header). + + This method differs from :meth:`initiate_connection + ` in several ways. 
+ Firstly, it handles the additional SETTINGS frame that is sent in the + ``HTTP2-Settings`` header field. When called on a client connection, + this method will return a bytestring that the caller can put in the + ``HTTP2-Settings`` field they send on their initial request. When + called on a server connection, the user **must** provide the value they + received from the client in the ``HTTP2-Settings`` header field to the + ``settings_header`` argument, which will be used appropriately. + + Additionally, this method sets up stream 1 in a half-closed state + appropriate for this side of the connection, to reflect the fact that + the request is already complete. + + Finally, this method also prepares the appropriate preamble to be sent + after the upgrade. + + .. versionadded:: 2.3.0 + + :param settings_header: (optional, server-only): The value of the + ``HTTP2-Settings`` header field received from the client. + :type settings_header: ``bytes`` + + :returns: For clients, a bytestring to put in the ``HTTP2-Settings``. + For servers, returns nothing. + :rtype: ``bytes`` or ``None`` + """ + self.config.logger.debug("Upgrade connection. 
Current settings: %s", self.local_settings) + frame_data = None + self.initiate_connection() + if self.config.client_side: + f = SettingsFrame(0) + for setting, value in self.local_settings.items(): + f.settings[setting] = value + + frame_data = f.serialize_body() + frame_data = base64.urlsafe_b64encode(frame_data) + else: + if settings_header: + settings_header = base64.urlsafe_b64decode(settings_header) + f = SettingsFrame(0) + f.parse_body(settings_header) + self._receive_settings_frame(f) + connection_input = ConnectionInputs.SEND_HEADERS if self.config.client_side else ConnectionInputs.RECV_HEADERS + self.config.logger.debug("Process input %s", connection_input) + self.state_machine.process_input(connection_input) + self._begin_new_stream(stream_id=1, allowed_ids=(AllowedStreamIDs.ODD)) + self.streams[1].upgrade(self.config.client_side) + return frame_data + + def _get_or_create_stream(self, stream_id, allowed_ids): + """ + Gets a stream by its stream ID. Will create one if one does not already + exist. Use allowed_ids to circumvent the usual stream ID rules for + clients and servers. + + .. versionchanged:: 2.0.0 + Removed this function from the public API. + """ + try: + return self.streams[stream_id] + except KeyError: + return self._begin_new_stream(stream_id, allowed_ids) + + def _get_stream_by_id(self, stream_id): + """ + Gets a stream by its stream ID. Raises NoSuchStreamError if the stream + ID does not correspond to a known stream and is higher than the current + maximum: raises if it is lower than the current maximum. + + .. versionchanged:: 2.0.0 + Removed this function from the public API. 
+ """ + try: + return self.streams[stream_id] + except KeyError: + outbound = self._stream_id_is_outbound(stream_id) + highest_stream_id = self.highest_outbound_stream_id if outbound else self.highest_inbound_stream_id + if stream_id > highest_stream_id: + raise NoSuchStreamError(stream_id) + else: + raise StreamClosedError(stream_id) + + def get_next_available_stream_id(self): + """ + Returns an integer suitable for use as the stream ID for the next + stream created by this endpoint. For server endpoints, this stream ID + will be even. For client endpoints, this stream ID will be odd. If no + stream IDs are available, raises :class:`NoAvailableStreamIDError + `. + + .. warning:: The return value from this function does not change until + the stream ID has actually been used by sending or pushing + headers on that stream. For that reason, it should be + called as close as possible to the actual use of the + stream ID. + + .. versionadded:: 2.0.0 + + :raises: :class:`NoAvailableStreamIDError + ` + :returns: The next free stream ID this peer can use to initiate a + stream. + :rtype: ``int`` + """ + if not self.highest_outbound_stream_id: + next_stream_id = 1 if self.config.client_side else 2 + else: + next_stream_id = self.highest_outbound_stream_id + 2 + self.config.logger.debug("Next available stream ID %d", next_stream_id) + if next_stream_id > self.HIGHEST_ALLOWED_STREAM_ID: + raise NoAvailableStreamIDError("Exhausted allowed stream IDs") + return next_stream_id + + def send_headers(self, stream_id, headers, end_stream=False, priority_weight=None, priority_depends_on=None, priority_exclusive=None): + """ + Send headers on a given stream. + + This function can be used to send request or response headers: the kind + that are sent depends on whether this connection has been opened as a + client or server connection, and whether the stream was opened by the + remote peer or not. 
+ + If this is a client connection, calling ``send_headers`` will send the + headers as a request. It will also implicitly open the stream being + used. If this is a client connection and ``send_headers`` has *already* + been called, this will send trailers instead. + + If this is a server connection, calling ``send_headers`` will send the + headers as a response. It is a protocol error for a server to open a + stream by sending headers. If this is a server connection and + ``send_headers`` has *already* been called, this will send trailers + instead. + + When acting as a server, you may call ``send_headers`` any number of + times allowed by the following rules, in this order: + + - zero or more times with ``(':status', '1XX')`` (where ``1XX`` is a + placeholder for any 100-level status code). + - once with any other status header. + - zero or one time for trailers. + + That is, you are allowed to send as many informational responses as you + like, followed by one complete response and zero or one HTTP trailer + blocks. + + Clients may send one or two header blocks: one request block, and + optionally one trailer block. + + If it is important to send HPACK "never indexed" header fields (as + defined in `RFC 7451 Section 7.1.3 + `_), the user may + instead provide headers using the HPACK library's :class:`HeaderTuple + ` and :class:`NeverIndexedHeaderTuple + ` objects. + + This method also allows users to prioritize the stream immediately, + by sending priority information on the HEADERS frame directly. To do + this, any one of ``priority_weight``, ``priority_depends_on``, or + ``priority_exclusive`` must be set to a value that is not ``None``. For + more information on the priority fields, see :meth:`prioritize + `. + + .. warning:: In HTTP/2, it is mandatory that all the HTTP/2 special + headers (that is, ones whose header keys begin with ``:``) appear + at the start of the header block, before any normal headers. 
+ If you pass a dictionary to the ``headers`` parameter, it is + unlikely that they will iterate in that order, and your connection + may fail. For this reason, passing a ``dict`` to ``headers`` is + *deprecated*, and will be removed in 3.0. + + .. versionchanged:: 2.3.0 + Added support for using :class:`HeaderTuple + ` objects to store headers. + + .. versionchanged:: 2.4.0 + Added the ability to provide priority keyword arguments: + ``priority_weight``, ``priority_depends_on``, and + ``priority_exclusive``. + + :param stream_id: The stream ID to send the headers on. If this stream + does not currently exist, it will be created. + :type stream_id: ``int`` + + :param headers: The request/response headers to send. + :type headers: An iterable of two tuples of bytestrings or + :class:`HeaderTuple ` objects. + + :param end_stream: Whether this headers frame should end the stream + immediately (that is, whether no more data will be sent after this + frame). Defaults to ``False``. + :type end_stream: ``bool`` + + :param priority_weight: Sets the priority weight of the stream. See + :meth:`prioritize ` for more + about how this field works. Defaults to ``None``, which means that + no priority information will be sent. + :type priority_weight: ``int`` or ``None`` + + :param priority_depends_on: Sets which stream this one depends on for + priority purposes. See :meth:`prioritize + ` for more about how this + field works. Defaults to ``None``, which means that no priority + information will be sent. + :type priority_depends_on: ``int`` or ``None`` + + :param priority_exclusive: Sets whether this stream exclusively depends + on the stream given in ``priority_depends_on`` for priority + purposes. See :meth:`prioritize + ` for more about how this + field workds. Defaults to ``None``, which means that no priority + information will be sent. 
+ :type priority_depends_on: ``bool`` or ``None`` + + :returns: Nothing + """ + self.config.logger.debug("Send headers on stream ID %d", stream_id) + if stream_id not in self.streams: + max_open_streams = self.remote_settings.max_concurrent_streams + if self.open_outbound_streams + 1 > max_open_streams: + raise TooManyStreamsError("Max outbound streams is %d, %d open" % ( + max_open_streams, self.open_outbound_streams)) + self.state_machine.process_input(ConnectionInputs.SEND_HEADERS) + stream = self._get_or_create_stream(stream_id, AllowedStreamIDs(self.config.client_side)) + frames = stream.send_headers(headers, self.encoder, end_stream) + priority_present = priority_weight is not None or priority_depends_on is not None or priority_exclusive is not None + if priority_present: + if not self.config.client_side: + raise RFC1122Error("Servers SHOULD NOT prioritize streams.") + headers_frame = frames[0] + headers_frame.flags.add("PRIORITY") + frames[0] = _add_frame_priority(headers_frame, priority_weight, priority_depends_on, priority_exclusive) + self._prepare_for_sending(frames) + + def send_data(self, stream_id, data, end_stream=False, pad_length=None): + """ + Send data on a given stream. + + This method does no breaking up of data: if the data is larger than the + value returned by :meth:`local_flow_control_window + ` for this stream + then a :class:`FlowControlError ` will + be raised. If the data is larger than :data:`max_outbound_frame_size + ` then a + :class:`FrameTooLargeError ` will be + raised. + + Hyper-h2 does this to avoid buffering the data internally. If the user + has more data to send than hyper-h2 will allow, consider breaking it up + and buffering it externally. + + :param stream_id: The ID of the stream on which to send the data. + :type stream_id: ``int`` + :param data: The data to send on the stream. + :type data: ``bytes`` + :param end_stream: (optional) Whether this is the last data to be sent + on the stream. Defaults to ``False``. 
+ :type end_stream: ``bool`` + :param pad_length: (optional) Length of the padding to apply to the + data frame. Defaults to ``None`` for no use of padding. Note that + a value of ``0`` results in padding of length ``0`` + (with the "padding" flag set on the frame). + + .. versionadded:: 2.6.0 + + :type pad_length: ``int`` + :returns: Nothing + """ + self.config.logger.debug("Send data on stream ID %d with len %d", stream_id, len(data)) + frame_size = len(data) + if pad_length is not None: + if not isinstance(pad_length, int): + raise TypeError("pad_length must be an int") + if pad_length < 0 or pad_length > 255: + raise ValueError("pad_length must be within range: [0, 255]") + frame_size += pad_length + 1 + else: + self.config.logger.debug("Frame size on stream ID %d is %d", stream_id, frame_size) + if frame_size > self.local_flow_control_window(stream_id): + raise FlowControlError("Cannot send %d bytes, flow control window is %d." % ( + frame_size, self.local_flow_control_window(stream_id))) + else: + if frame_size > self.max_outbound_frame_size: + raise FrameTooLargeError("Cannot send frame size %d, max frame size is %d" % ( + frame_size, self.max_outbound_frame_size)) + self.state_machine.process_input(ConnectionInputs.SEND_DATA) + frames = self.streams[stream_id].send_data(data, + end_stream, pad_length=pad_length) + self._prepare_for_sending(frames) + self.outbound_flow_control_window -= frame_size + self.config.logger.debug("Outbound flow control window size is %d", self.outbound_flow_control_window) + assert self.outbound_flow_control_window >= 0 + + def end_stream(self, stream_id): + """ + Cleanly end a given stream. + + This method ends a stream by sending an empty DATA frame on that stream + with the ``END_STREAM`` flag set. + + :param stream_id: The ID of the stream to end. 
+ :type stream_id: ``int`` + :returns: Nothing + """ + self.config.logger.debug("End stream ID %d", stream_id) + self.state_machine.process_input(ConnectionInputs.SEND_DATA) + frames = self.streams[stream_id].end_stream() + self._prepare_for_sending(frames) + + def increment_flow_control_window(self, increment, stream_id=None): + """ + Increment a flow control window, optionally for a single stream. Allows + the remote peer to send more data. + + .. versionchanged:: 2.0.0 + Rejects attempts to increment the flow control window by out of + range values with a ``ValueError``. + + :param increment: The amount to increment the flow control window by. + :type increment: ``int`` + :param stream_id: (optional) The ID of the stream that should have its + flow control window opened. If not present or ``None``, the + connection flow control window will be opened instead. + :type stream_id: ``int`` or ``None`` + :returns: Nothing + :raises: ``ValueError`` + """ + if not 1 <= increment <= self.MAX_WINDOW_INCREMENT: + raise ValueError("Flow control increment must be between 1 and %d" % self.MAX_WINDOW_INCREMENT) + else: + self.state_machine.process_input(ConnectionInputs.SEND_WINDOW_UPDATE) + if stream_id is not None: + stream = self.streams[stream_id] + frames = stream.increase_flow_control_window(increment) + else: + self._inbound_flow_control_window_manager.window_opened(increment) + f = WindowUpdateFrame(0) + f.window_increment = increment + frames = [f] + self.config.logger.debug("Increase stream ID %d flow control window by %d", stream_id, increment) + self._prepare_for_sending(frames) + + def push_stream(self, stream_id, promised_stream_id, request_headers): + """ + Push a response to the client by sending a PUSH_PROMISE frame. 
+ + If it is important to send HPACK "never indexed" header fields (as + defined in `RFC 7451 Section 7.1.3 + `_), the user may + instead provide headers using the HPACK library's :class:`HeaderTuple + ` and :class:`NeverIndexedHeaderTuple + ` objects. + + :param stream_id: The ID of the stream that this push is a response to. + :type stream_id: ``int`` + :param promised_stream_id: The ID of the stream that the pushed + response will be sent on. + :type promised_stream_id: ``int`` + :param request_headers: The headers of the request that the pushed + response will be responding to. + :type request_headers: An iterable of two tuples of bytestrings or + :class:`HeaderTuple ` objects. + :returns: Nothing + """ + self.config.logger.debug("Send Push Promise frame on stream ID %d", stream_id) + if not self.remote_settings.enable_push: + raise ProtocolError("Remote peer has disabled stream push") + self.state_machine.process_input(ConnectionInputs.SEND_PUSH_PROMISE) + stream = self._get_stream_by_id(stream_id) + if stream_id % 2 == 0: + raise ProtocolError("Cannot recursively push streams.") + new_stream = self._begin_new_stream(promised_stream_id, AllowedStreamIDs.EVEN) + self.streams[promised_stream_id] = new_stream + frames = stream.push_stream_in_band(promised_stream_id, request_headers, self.encoder) + new_frames = new_stream.locally_pushed() + self._prepare_for_sending(frames + new_frames) + + def ping(self, opaque_data): + """ + Send a PING frame. + + :param opaque_data: A bytestring of length 8 that will be sent in the + PING frame. + :returns: Nothing + """ + self.config.logger.debug("Send Ping frame") + if not isinstance(opaque_data, bytes) or len(opaque_data) != 8: + raise ValueError("Invalid value for ping data: %r" % opaque_data) + self.state_machine.process_input(ConnectionInputs.SEND_PING) + f = PingFrame(0) + f.opaque_data = opaque_data + self._prepare_for_sending([f]) + + def reset_stream(self, stream_id, error_code=0): + """ + Reset a stream. 
+ + This method forcibly closes a stream by sending a RST_STREAM frame for + a given stream. This is not a graceful closure. To gracefully end a + stream, try the :meth:`end_stream + ` method. + + :param stream_id: The ID of the stream to reset. + :type stream_id: ``int`` + :param error_code: (optional) The error code to use to reset the + stream. Defaults to :data:`ErrorCodes.NO_ERROR + `. + :type error_code: ``int`` + :returns: Nothing + """ + self.config.logger.debug("Reset stream ID %d", stream_id) + self.state_machine.process_input(ConnectionInputs.SEND_RST_STREAM) + stream = self._get_stream_by_id(stream_id) + frames = stream.reset_stream(error_code) + self._prepare_for_sending(frames) + + def close_connection(self, error_code=0, additional_data=None, last_stream_id=None): + """ + Close a connection, emitting a GOAWAY frame. + + .. versionchanged:: 2.4.0 + Added ``additional_data`` and ``last_stream_id`` arguments. + + :param error_code: (optional) The error code to send in the GOAWAY + frame. + :param additional_data: (optional) Additional debug data indicating + a reason for closing the connection. Must be a bytestring. + :param last_stream_id: (optional) The last stream which was processed + by the sender. Defaults to ``highest_inbound_stream_id``. + :returns: Nothing + """ + self.config.logger.debug("Close connection") + self.state_machine.process_input(ConnectionInputs.SEND_GOAWAY) + if additional_data is not None: + assert isinstance(additional_data, bytes) + if last_stream_id is None: + last_stream_id = self.highest_inbound_stream_id + f = GoAwayFrame(stream_id=0, + last_stream_id=last_stream_id, + error_code=error_code, + additional_data=(additional_data or b'')) + self._prepare_for_sending([f]) + + def update_settings(self, new_settings): + """ + Update the local settings. This will prepare and emit the appropriate + SETTINGS frame. 
+ + :param new_settings: A dictionary of {setting: new value} + """ + self.config.logger.debug("Update connection settings to %s", new_settings) + self.state_machine.process_input(ConnectionInputs.SEND_SETTINGS) + self.local_settings.update(new_settings) + s = SettingsFrame(0) + s.settings = new_settings + self._prepare_for_sending([s]) + + def advertise_alternative_service(self, field_value, origin=None, stream_id=None): + """ + Notify a client about an available Alternative Service. + + An Alternative Service is defined in `RFC 7838 + `_. An Alternative Service + notification informs a client that a given origin is also available + elsewhere. + + Alternative Services can be advertised in two ways. Firstly, they can + be advertised explicitly: that is, a server can say "origin X is also + available at Y". To advertise like this, set the ``origin`` argument + and not the ``stream_id`` argument. Alternatively, they can be + advertised implicitly: that is, a server can say "the origin you're + contacting on stream X is also available at Y". To advertise like this, + set the ``stream_id`` argument and not the ``origin`` argument. + + The explicit method of advertising can be done as long as the + connection is active. The implicit method can only be done after the + client has sent the request headers and before the server has sent the + response headers: outside of those points, Hyper-h2 will forbid sending + the Alternative Service advertisement by raising a ProtocolError. + + The ``field_value`` parameter is specified in RFC 7838. Hyper-h2 does + not validate or introspect this argument: the user is required to + ensure that it's well-formed. ``field_value`` corresponds to RFC 7838's + "Alternative Service Field Value". + + .. note:: It is strongly preferred to use the explicit method of + advertising Alternative Services. 
The implicit method of + advertising Alternative Services has a number of subtleties + and can lead to inconsistencies between the server and + client. Hyper-h2 allows both mechanisms, but caution is + strongly advised. + + .. versionadded:: 2.3.0 + + :param field_value: The RFC 7838 Alternative Service Field Value. This + argument is not introspected by Hyper-h2: the user is responsible + for ensuring that it is well-formed. + :type field_value: ``bytes`` + + :param origin: The origin/authority to which the Alternative Service + being advertised applies. Must not be provided at the same time as + ``stream_id``. + :type origin: ``bytes`` or ``None`` + + :param stream_id: The ID of the stream which was sent to the authority + for which this Alternative Service advertisement applies. Must not + be provided at the same time as ``origin``. + :type stream_id: ``int`` or ``None`` + + :returns: Nothing. + """ + if not isinstance(field_value, bytes): + raise ValueError("Field must be bytestring.") + elif origin is not None: + if stream_id is not None: + raise ValueError("Must not provide both origin and stream_id") + self.state_machine.process_input(ConnectionInputs.SEND_ALTERNATIVE_SERVICE) + if origin is not None: + f = AltSvcFrame(stream_id=0) + f.origin = origin + f.field = field_value + frames = [f] + else: + stream = self._get_stream_by_id(stream_id) + frames = stream.advertise_alternative_service(field_value) + self._prepare_for_sending(frames) + + def prioritize(self, stream_id, weight=None, depends_on=None, exclusive=None): + """ + Notify a server about the priority of a stream. + + Stream priorities are a form of guidance to a remote server: they + inform the server about how important a given response is, so that the + server may allocate its resources (e.g. bandwidth, CPU time, etc.) + accordingly. This exists to allow clients to ensure that the most + important data arrives earlier, while less important data does not + starve out the more important data. 
+ + Stream priorities are explained in depth in `RFC 7540 Section 5.3 + `_. + + This method updates the priority information of a single stream. It may + be called well before a stream is actively in use, or well after a + stream is closed. + + .. warning:: RFC 7540 allows for servers to change the priority of + streams. However, hyper-h2 **does not** allow server + stacks to do this. This is because most clients do not + adequately know how to respond when provided conflicting + priority information, and relatively little utility is + provided by making that functionality available. + + .. note:: hyper-h2 **does not** maintain any information about the + RFC 7540 priority tree. That means that hyper-h2 does not + prevent incautious users from creating invalid priority + trees, particularly by creating priority loops. While some + basic error checking is provided by hyper-h2, users are + strongly recommended to understand their prioritisation + strategies before using the priority tools here. + + .. note:: Priority information is strictly advisory. Servers are + allowed to disregard it entirely. Avoid relying on the idea + that your priority signaling will definitely be obeyed. + + .. versionadded:: 2.4.0 + + :param stream_id: The ID of the stream to prioritize. + :type stream_id: ``int`` + + :param weight: The weight to give the stream. Defaults to ``16``, the + default weight of any stream. May be any value between ``1`` and + ``256`` inclusive. The relative weight of a stream indicates what + proportion of available resources will be allocated to that + stream. + :type weight: ``int`` + + :param depends_on: The ID of the stream on which this stream depends. + This stream will only be progressed if it is impossible to + progress the parent stream (the one on which this one depends). + Passing the value ``0`` means that this stream does not depend on + any other. Defaults to ``0``. 
+ :type depends_on: ``int`` + + :param exclusive: Whether this stream is an exclusive dependency of its + "parent" stream (i.e. the stream given by ``depends_on``). If a + stream is an exclusive dependency of another, that means that all + previously-set children of the parent are moved to become children + of the new exclusively-dependent stream. Defaults to ``False``. + :type exclusive: ``bool`` + """ + if not self.config.client_side: + raise RFC1122Error("Servers SHOULD NOT prioritize streams.") + self.state_machine.process_input(ConnectionInputs.SEND_PRIORITY) + frame = PriorityFrame(stream_id) + frame = _add_frame_priority(frame, weight, depends_on, exclusive) + self._prepare_for_sending([frame]) + + def local_flow_control_window(self, stream_id): + """ + Returns the maximum amount of data that can be sent on stream + ``stream_id``. + + This value will never be larger than the total data that can be sent on + the connection: even if the given stream allows more data, the + connection window provides a logical maximum to the amount of data that + can be sent. + + The maximum data that can be sent in a single data frame on a stream + is either this value, or the maximum frame size, whichever is + *smaller*. + + :param stream_id: The ID of the stream whose flow control window is + being queried. + :type stream_id: ``int`` + :returns: The amount of data in bytes that can be sent on the stream + before the flow control window is exhausted. + :rtype: ``int`` + """ + stream = self._get_stream_by_id(stream_id) + return min(self.outbound_flow_control_window, stream.outbound_flow_control_window) + + def remote_flow_control_window(self, stream_id): + """ + Returns the maximum amount of data the remote peer can send on stream + ``stream_id``. + + This value will never be larger than the total data that can be sent on + the connection: even if the given stream allows more data, the + connection window provides a logical maximum to the amount of data that + can be sent. 
+ + The maximum data that can be sent in a single data frame on a stream + is either this value, or the maximum frame size, whichever is + *smaller*. + + :param stream_id: The ID of the stream whose flow control window is + being queried. + :type stream_id: ``int`` + :returns: The amount of data in bytes that can be received on the + stream before the flow control window is exhausted. + :rtype: ``int`` + """ + stream = self._get_stream_by_id(stream_id) + return min(self.inbound_flow_control_window, stream.inbound_flow_control_window) + + def acknowledge_received_data(self, acknowledged_size, stream_id): + """ + Inform the :class:`H2Connection ` that a + certain number of flow-controlled bytes have been processed, and that + the space should be handed back to the remote peer at an opportune + time. + + .. versionadded:: 2.5.0 + + :param acknowledged_size: The total *flow-controlled size* of the data + that has been processed. Note that this must include the amount of + padding that was sent with that data. + :type acknowledged_size: ``int`` + :param stream_id: The ID of the stream on which this data was received. 
+ :type stream_id: ``int`` + :returns: Nothing + :rtype: ``None`` + """ + self.config.logger.debug("Ack received data on stream ID %d with size %d", stream_id, acknowledged_size) + if stream_id <= 0: + raise ValueError("Stream ID %d is not valid for acknowledge_received_data" % stream_id) + if acknowledged_size < 0: + raise ValueError("Cannot acknowledge negative data") + frames = [] + conn_manager = self._inbound_flow_control_window_manager + conn_increment = conn_manager.process_bytes(acknowledged_size) + if conn_increment: + f = WindowUpdateFrame(0) + f.window_increment = conn_increment + frames.append(f) + try: + stream = self._get_stream_by_id(stream_id) + except StreamClosedError: + pass + else: + if stream.open: + frames.extend(stream.acknowledge_received_data(acknowledged_size)) + + def data_to_send(self, amt=None): + """ + Returns some data for sending out of the internal data buffer. + + This method is analogous to ``read`` on a file-like object, but it + doesn't block. Instead, it returns as much data as the user asks for, + or less if that much data is not available. It does not perform any + I/O, and so uses a different name. + + :param amt: (optional) The maximum amount of data to return. If not + set, or set to ``None``, will return as much data as possible. + :type amt: ``int`` + :returns: A bytestring containing the data to send on the wire. + :rtype: ``bytes`` + """ + if amt is None: + data = self._data_to_send + self._data_to_send = b'' + return data + data = self._data_to_send[None[:amt]] + self._data_to_send = self._data_to_send[amt[:None]] + return data + + def clear_outbound_data_buffer(self): + """ + Clears the outbound data buffer, such that if this call was immediately + followed by a call to + :meth:`data_to_send `, that + call would return no data. + + This method should not normally be used, but is made available to avoid + exposing implementation details. 
+ """ + self._data_to_send = b'' + + def _acknowledge_settings(self): + """ + Acknowledge settings that have been received. + + .. versionchanged:: 2.0.0 + Removed from public API, removed useless ``event`` parameter, made + automatic. + + :returns: Nothing + """ + self.state_machine.process_input(ConnectionInputs.SEND_SETTINGS) + changes = self.remote_settings.acknowledge() + if SettingCodes.INITIAL_WINDOW_SIZE in changes: + setting = changes[SettingCodes.INITIAL_WINDOW_SIZE] + self._flow_control_change_from_settings(setting.original_value, setting.new_value) + if SettingCodes.HEADER_TABLE_SIZE in changes: + setting = changes[SettingCodes.HEADER_TABLE_SIZE] + self.encoder.header_table_size = setting.new_value + if SettingCodes.MAX_FRAME_SIZE in changes: + setting = changes[SettingCodes.MAX_FRAME_SIZE] + self.max_outbound_frame_size = setting.new_value + for stream in self.streams.values(): + stream.max_outbound_frame_size = setting.new_value + + f = SettingsFrame(0) + f.flags.add("ACK") + return [f] + + def _flow_control_change_from_settings(self, old_value, new_value): + """ + Update flow control windows in response to a change in the value of + SETTINGS_INITIAL_WINDOW_SIZE. + + When this setting is changed, it automatically updates all flow control + windows by the delta in the settings values. Note that it does not + increment the *connection* flow control window, per section 6.9.2 of + RFC 7540. + """ + delta = new_value - old_value + for stream in self.streams.values(): + stream.outbound_flow_control_window = guard_increment_window(stream.outbound_flow_control_window, delta) + + def _inbound_flow_control_change_from_settings(self, old_value, new_value): + """ + Update remote flow control windows in response to a change in the value + of SETTINGS_INITIAL_WINDOW_SIZE. + + When this setting is changed, it automatically updates all remote flow + control windows by the delta in the settings values. 
+ """ + delta = new_value - old_value + for stream in self.streams.values(): + stream._inbound_flow_control_change_from_settings(delta) + + def receive_data(self, data): + """ + Pass some received HTTP/2 data to the connection for handling. + + :param data: The data received from the remote peer on the network. + :type data: ``bytes`` + :returns: A list of events that the remote peer triggered by sending + this data. + """ + self.config.logger.debug("Process received data on connection. Received data: %r", data) + events = [] + self.incoming_buffer.add_data(data) + self.incoming_buffer.max_frame_size = self.max_inbound_frame_size + try: + for frame in self.incoming_buffer: + events.extend(self._receive_frame(frame)) + + except InvalidPaddingError: + self._terminate_connection(ErrorCodes.PROTOCOL_ERROR) + raise ProtocolError("Received frame with invalid padding.") + except ProtocolError as e: + try: + self._terminate_connection(e.error_code) + raise + finally: + e = None + del e + + return events + + def _receive_frame(self, frame): + """ + Handle a frame received on the connection. + + .. versionchanged:: 2.0.0 + Removed from the public API. 
+ """ + try: + frames, events = self._frame_dispatch_table[frame.__class__](frame) + except StreamClosedError as e: + try: + if self._stream_is_closed_by_reset(e.stream_id): + f = RstStreamFrame(e.stream_id) + f.error_code = e.error_code + self._prepare_for_sending([f]) + events = e._events + else: + raise + finally: + e = None + del e + + except StreamIDTooLowError as e: + try: + if self._stream_is_closed_by_reset(e.stream_id): + f = RstStreamFrame(e.stream_id) + f.error_code = ErrorCodes.STREAM_CLOSED + self._prepare_for_sending([f]) + events = [] + else: + if self._stream_is_closed_by_end(e.stream_id): + raise StreamClosedError(e.stream_id) + else: + raise + finally: + e = None + del e + + else: + self._prepare_for_sending(frames) + return events + + def _terminate_connection(self, error_code): + """ + Terminate the connection early. Used in error handling blocks to send + GOAWAY frames. + """ + f = GoAwayFrame(0) + f.last_stream_id = self.highest_inbound_stream_id + f.error_code = error_code + self.state_machine.process_input(ConnectionInputs.SEND_GOAWAY) + self._prepare_for_sending([f]) + + def _receive_headers_frame(self, frame): + """ + Receive a headers frame on the connection. 
+ """ + if frame.stream_id not in self.streams: + max_open_streams = self.local_settings.max_concurrent_streams + if self.open_inbound_streams + 1 > max_open_streams: + raise TooManyStreamsError("Max outbound streams is %d, %d open" % ( + max_open_streams, self.open_outbound_streams)) + headers = _decode_headers(self.decoder, frame.data) + events = self.state_machine.process_input(ConnectionInputs.RECV_HEADERS) + stream = self._get_or_create_stream(frame.stream_id, AllowedStreamIDs(not self.config.client_side)) + frames, stream_events = stream.receive_headers(headers, "END_STREAM" in frame.flags, self.config.header_encoding) + if "PRIORITY" in frame.flags: + p_frames, p_events = self._receive_priority_frame(frame) + stream_events[0].priority_updated = p_events[0] + stream_events.extend(p_events) + assert not p_frames + return (frames, events + stream_events) + + def _receive_push_promise_frame(self, frame): + """ + Receive a push-promise frame on the connection. + """ + if not self.local_settings.enable_push: + raise ProtocolError("Received pushed stream") + pushed_headers = _decode_headers(self.decoder, frame.data) + events = self.state_machine.process_input(ConnectionInputs.RECV_PUSH_PROMISE) + try: + stream = self._get_stream_by_id(frame.stream_id) + except NoSuchStreamError: + if self._stream_closed_by(frame.stream_id) == StreamClosedBy.SEND_RST_STREAM: + f = RstStreamFrame(frame.promised_stream_id) + f.error_code = ErrorCodes.REFUSED_STREAM + return ([f], events) + raise ProtocolError("Attempted to push on closed stream.") + + if frame.stream_id % 2 == 0: + raise ProtocolError("Cannot recursively push streams.") + try: + frames, stream_events = stream.receive_push_promise_in_band(frame.promised_stream_id, pushed_headers, self.config.header_encoding) + except StreamClosedError: + f = RstStreamFrame(frame.promised_stream_id) + f.error_code = ErrorCodes.REFUSED_STREAM + return ([f], events) + else: + new_stream = self._begin_new_stream(frame.promised_stream_id, 
AllowedStreamIDs.EVEN) + self.streams[frame.promised_stream_id] = new_stream + new_stream.remotely_pushed(pushed_headers) + return ( + frames, events + stream_events) + + def _receive_data_frame(self, frame): + """ + Receive a data frame on the connection. + """ + flow_controlled_length = frame.flow_controlled_length + events = self.state_machine.process_input(ConnectionInputs.RECV_DATA) + self._inbound_flow_control_window_manager.window_consumed(flow_controlled_length) + stream = self._get_stream_by_id(frame.stream_id) + frames, stream_events = stream.receive_data(frame.data, "END_STREAM" in frame.flags, flow_controlled_length) + return ( + frames, events + stream_events) + + def _receive_settings_frame(self, frame): + """ + Receive a SETTINGS frame on the connection. + """ + events = self.state_machine.process_input(ConnectionInputs.RECV_SETTINGS) + if "ACK" in frame.flags: + changed_settings = self._local_settings_acked() + ack_event = SettingsAcknowledged() + ack_event.changed_settings = changed_settings + events.append(ack_event) + return ([], events) + self.remote_settings.update(frame.settings) + events.append(RemoteSettingsChanged.from_settings(self.remote_settings, frame.settings)) + frames = self._acknowledge_settings() + return ( + frames, events) + + def _receive_window_update_frame(self, frame): + """ + Receive a WINDOW_UPDATE frame on the connection. 
+ """ + if not 1 <= frame.window_increment <= self.MAX_WINDOW_INCREMENT: + raise ProtocolError("Flow control increment must be between 1 and %d, received %d" % ( + self.MAX_WINDOW_INCREMENT, frame.window_increment)) + else: + events = self.state_machine.process_input(ConnectionInputs.RECV_WINDOW_UPDATE) + if frame.stream_id: + stream = self._get_stream_by_id(frame.stream_id) + frames, stream_events = stream.receive_window_update(frame.window_increment) + else: + self.outbound_flow_control_window = guard_increment_window(self.outbound_flow_control_window, frame.window_increment) + window_updated_event = WindowUpdated() + window_updated_event.stream_id = 0 + window_updated_event.delta = frame.window_increment + stream_events = [window_updated_event] + frames = [] + return (frames, events + stream_events) + + def _receive_ping_frame(self, frame): + """ + Receive a PING frame on the connection. + """ + events = self.state_machine.process_input(ConnectionInputs.RECV_PING) + flags = [] + if "ACK" in frame.flags: + evt = PingAcknowledged() + evt.ping_data = frame.opaque_data + events.append(evt) + else: + f = PingFrame(0) + f.flags = {"ACK"} + f.opaque_data = frame.opaque_data + flags.append(f) + return (flags, events) + + def _receive_rst_stream_frame(self, frame): + """ + Receive a RST_STREAM frame on the connection. + """ + events = self.state_machine.process_input(ConnectionInputs.RECV_RST_STREAM) + try: + stream = self._get_stream_by_id(frame.stream_id) + except NoSuchStreamError: + stream_frames = [] + stream_events = [] + else: + stream_frames, stream_events = stream.stream_reset(frame) + return (stream_frames, events + stream_events) + + def _receive_priority_frame(self, frame): + """ + Receive a PRIORITY frame on the connection. 
+ """ + events = self.state_machine.process_input(ConnectionInputs.RECV_PRIORITY) + event = PriorityUpdated() + event.stream_id = frame.stream_id + event.depends_on = frame.depends_on + event.exclusive = frame.exclusive + event.weight = frame.stream_weight + 1 + if event.depends_on == frame.stream_id: + raise ProtocolError("Stream %d may not depend on itself" % frame.stream_id) + events.append(event) + return ([], events) + + def _receive_goaway_frame(self, frame): + """ + Receive a GOAWAY frame on the connection. + """ + events = self.state_machine.process_input(ConnectionInputs.RECV_GOAWAY) + self.clear_outbound_data_buffer() + new_event = ConnectionTerminated() + new_event.error_code = _error_code_from_int(frame.error_code) + new_event.last_stream_id = frame.last_stream_id + new_event.additional_data = frame.additional_data if frame.additional_data else None + events.append(new_event) + return ([], events) + + def _receive_naked_continuation(self, frame): + """ + A naked CONTINUATION frame has been received. This is always an error, + but the type of error it is depends on the state of the stream and must + transition the state of the stream, so we need to pass it to the + appropriate stream. + """ + stream = self._get_stream_by_id(frame.stream_id) + stream.receive_continuation() + assert False, "Should not be reachable" + + def _receive_alt_svc_frame(self, frame): + """ + An ALTSVC frame has been received. This frame, specified in RFC 7838, + is used to advertise alternative places where the same service can be + reached. + + This frame can optionally be received either on a stream or on stream + 0, and its semantics are different in each case. 
+ """ + events = self.state_machine.process_input(ConnectionInputs.RECV_ALTERNATIVE_SERVICE) + frames = [] + if frame.stream_id: + try: + stream = self._get_stream_by_id(frame.stream_id) + except (NoSuchStreamError, StreamClosedError): + pass + else: + stream_frames, stream_events = stream.receive_alt_svc(frame) + frames.extend(stream_frames) + events.extend(stream_events) + else: + if not frame.origin: + return ( + frames, events) + else: + return self.config.client_side or ( + frames, events) + event = AlternativeServiceAvailable() + event.origin = frame.origin + event.field_value = frame.field + events.append(event) + return (frames, events) + + def _receive_unknown_frame(self, frame): + """ + We have received a frame that we do not understand. This is almost + certainly an extension frame, though it's impossible to be entirely + sure. + + RFC 7540 § 5.5 says that we MUST ignore unknown frame types: so we + do. + """ + self.config.logger.debug("Received unknown extension frame (ID %d)", frame.stream_id) + return ([], []) + + def _local_settings_acked(self): + """ + Handle the local settings being ACKed, update internal state. 
+ """ + changes = self.local_settings.acknowledge() + if SettingCodes.INITIAL_WINDOW_SIZE in changes: + setting = changes[SettingCodes.INITIAL_WINDOW_SIZE] + self._inbound_flow_control_change_from_settings(setting.original_value, setting.new_value) + if SettingCodes.MAX_HEADER_LIST_SIZE in changes: + setting = changes[SettingCodes.MAX_HEADER_LIST_SIZE] + self.decoder.max_header_list_size = setting.new_value + if SettingCodes.MAX_FRAME_SIZE in changes: + setting = changes[SettingCodes.MAX_FRAME_SIZE] + self.max_inbound_frame_size = setting.new_value + if SettingCodes.HEADER_TABLE_SIZE in changes: + setting = changes[SettingCodes.HEADER_TABLE_SIZE] + self.decoder.max_allowed_table_size = setting.new_value + return changes + + def _stream_id_is_outbound(self, stream_id): + """ + Returns ``True`` if the stream ID corresponds to an outbound stream + (one initiated by this peer), returns ``False`` otherwise. + """ + return stream_id % 2 == int(self.config.client_side) + + def _stream_closed_by(self, stream_id): + """ + Returns how the stream was closed. + + The return value will be either a member of + ``h2.stream.StreamClosedBy`` or ``None``. If ``None``, the stream was + closed implicitly by the peer opening a stream with a higher stream ID + before opening this one. + """ + if stream_id in self.streams: + return self.streams[stream_id].closed_by + if stream_id in self._closed_streams: + return self._closed_streams[stream_id] + + def _stream_is_closed_by_reset(self, stream_id): + """ + Returns ``True`` if the stream was closed by sending or receiving a + RST_STREAM frame. Returns ``False`` otherwise. + """ + return self._stream_closed_by(stream_id) in ( + StreamClosedBy.RECV_RST_STREAM, StreamClosedBy.SEND_RST_STREAM) + + def _stream_is_closed_by_end(self, stream_id): + """ + Returns ``True`` if the stream was closed by sending or receiving an + END_STREAM flag in a HEADERS or DATA frame. Returns ``False`` + otherwise. 
+ """ + return self._stream_closed_by(stream_id) in ( + StreamClosedBy.RECV_END_STREAM, StreamClosedBy.SEND_END_STREAM) + + +def _add_frame_priority(frame, weight=None, depends_on=None, exclusive=None): + """ + Adds priority data to a given frame. Does not change any flags set on that + frame: if the caller is adding priority information to a HEADERS frame they + must set that themselves. + + This method also deliberately sets defaults for anything missing. + + This method validates the input values. + """ + if depends_on == frame.stream_id: + raise ProtocolError("Stream %d may not depend on itself" % frame.stream_id) + elif not weight is not None or weight > 256 or weight < 1: + raise ProtocolError("Weight must be between 1 and 256, not %d" % weight) + else: + weight -= 1 + weight = weight if weight is not None else 15 + depends_on = depends_on if depends_on is not None else 0 + exclusive = exclusive if exclusive is not None else False + frame.stream_weight = weight + frame.depends_on = depends_on + frame.exclusive = exclusive + return frame + + +def _decode_headers(decoder, encoded_header_block): + """ + Decode a HPACK-encoded header block, translating HPACK exceptions into + sensible hyper-h2 errors. + + This only ever returns bytestring headers: hyper-h2 may emit them as + unicode later, but internally it processes them as bytestrings only. 
+ """ + try: + return decoder.decode(encoded_header_block, raw=True) + except OversizedHeaderListError as e: + try: + raise DenialOfServiceError("Oversized header block: %s" % e) + finally: + e = None + del e + + except (HPACKError, IndexError, TypeError, UnicodeDecodeError) as e: + try: + raise ProtocolError("Error decoding header block: %s" % e) + finally: + e = None + del e diff --git a/APPS_UNCOMPILED/lib/h2/errors.py b/APPS_UNCOMPILED/lib/h2/errors.py new file mode 100644 index 0000000..e2e737c --- /dev/null +++ b/APPS_UNCOMPILED/lib/h2/errors.py @@ -0,0 +1,72 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/h2/errors.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 5040 bytes +""" +h2/errors +~~~~~~~~~~~~~~~~~~~ + +Global error code registry containing the established HTTP/2 error codes. + +The current registry is available at: +https://tools.ietf.org/html/rfc7540#section-11.4 +""" +import enum + +class ErrorCodes(enum.IntEnum): + __doc__ = "\n All known HTTP/2 error codes.\n\n .. versionadded:: 2.5.0\n " + NO_ERROR = 0 + PROTOCOL_ERROR = 1 + INTERNAL_ERROR = 2 + FLOW_CONTROL_ERROR = 3 + SETTINGS_TIMEOUT = 4 + STREAM_CLOSED = 5 + FRAME_SIZE_ERROR = 6 + REFUSED_STREAM = 7 + CANCEL = 8 + COMPRESSION_ERROR = 9 + CONNECT_ERROR = 10 + ENHANCE_YOUR_CALM = 11 + INADEQUATE_SECURITY = 12 + HTTP_1_1_REQUIRED = 13 + + +def _error_code_from_int(code): + """ + Given an integer error code, returns either one of :class:`ErrorCodes + ` or, if not present in the known set of codes, + returns the integer directly. 
+ """ + try: + return ErrorCodes(code) + except ValueError: + return code + + +NO_ERROR = ErrorCodes.NO_ERROR +PROTOCOL_ERROR = ErrorCodes.PROTOCOL_ERROR +INTERNAL_ERROR = ErrorCodes.INTERNAL_ERROR +FLOW_CONTROL_ERROR = ErrorCodes.FLOW_CONTROL_ERROR +SETTINGS_TIMEOUT = ErrorCodes.SETTINGS_TIMEOUT +STREAM_CLOSED = ErrorCodes.STREAM_CLOSED +FRAME_SIZE_ERROR = ErrorCodes.FRAME_SIZE_ERROR +REFUSED_STREAM = ErrorCodes.REFUSED_STREAM +CANCEL = ErrorCodes.CANCEL +COMPRESSION_ERROR = ErrorCodes.COMPRESSION_ERROR +CONNECT_ERROR = ErrorCodes.CONNECT_ERROR +ENHANCE_YOUR_CALM = ErrorCodes.ENHANCE_YOUR_CALM +INADEQUATE_SECURITY = ErrorCodes.INADEQUATE_SECURITY +HTTP_1_1_REQUIRED = ErrorCodes.HTTP_1_1_REQUIRED +H2_ERRORS = list(ErrorCodes) +__all__ = [ + 'H2_ERRORS', 'NO_ERROR', 'PROTOCOL_ERROR', 'INTERNAL_ERROR', + 'FLOW_CONTROL_ERROR', + 'SETTINGS_TIMEOUT', 'STREAM_CLOSED', + 'FRAME_SIZE_ERROR', 'REFUSED_STREAM', + 'CANCEL', 'COMPRESSION_ERROR', + 'CONNECT_ERROR', 'ENHANCE_YOUR_CALM', 'INADEQUATE_SECURITY', + 'HTTP_1_1_REQUIRED', + 'ErrorCodes'] diff --git a/APPS_UNCOMPILED/lib/h2/events.py b/APPS_UNCOMPILED/lib/h2/events.py new file mode 100644 index 0000000..98fb28b --- /dev/null +++ b/APPS_UNCOMPILED/lib/h2/events.py @@ -0,0 +1,277 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/h2/events.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 20478 bytes +""" +h2/events +~~~~~~~~~ + +Defines Event types for HTTP/2. + +Events are returned by the H2 state machine to allow implementations to keep +track of events triggered by receiving data. Each time data is provided to the +H2 state machine it processes the data and returns a list of Event objects. 
+""" +import binascii +from .settings import ChangedSetting, _setting_code_from_int + +class Event(object): + __doc__ = "\n Base class for h2 events.\n " + + +class RequestReceived(Event): + __doc__ = "\n The RequestReceived event is fired whenever request headers are received.\n This event carries the HTTP headers for the given request and the stream ID\n of the new stream.\n\n .. versionchanged:: 2.3.0\n Changed the type of ``headers`` to :class:`HeaderTuple\n `. This has no effect on current users.\n\n .. versionchanged:: 2.4.0\n Added ``stream_ended`` and ``priority_updated`` properties.\n " + + def __init__(self): + self.stream_id = None + self.headers = None + self.stream_ended = None + self.priority_updated = None + + def __repr__(self): + return "" % ( + self.stream_id, self.headers) + + +class ResponseReceived(Event): + __doc__ = "\n The ResponseReceived event is fired whenever response headers are received.\n This event carries the HTTP headers for the given response and the stream\n ID of the new stream.\n\n .. versionchanged:: 2.3.0\n Changed the type of ``headers`` to :class:`HeaderTuple\n `. This has no effect on current users.\n\n .. versionchanged:: 2.4.0\n Added ``stream_ended`` and ``priority_updated`` properties.\n " + + def __init__(self): + self.stream_id = None + self.headers = None + self.stream_ended = None + self.priority_updated = None + + def __repr__(self): + return "" % ( + self.stream_id, self.headers) + + +class TrailersReceived(Event): + __doc__ = "\n The TrailersReceived event is fired whenever trailers are received on a\n stream. Trailers are a set of headers sent after the body of the\n request/response, and are used to provide information that wasn't known\n ahead of time (e.g. content-length). This event carries the HTTP header\n fields that form the trailers and the stream ID of the stream on which they\n were received.\n\n .. versionchanged:: 2.3.0\n Changed the type of ``headers`` to :class:`HeaderTuple\n `. 
This has no effect on current users.\n\n .. versionchanged:: 2.4.0\n Added ``stream_ended`` and ``priority_updated`` properties.\n " + + def __init__(self): + self.stream_id = None + self.headers = None + self.stream_ended = None + self.priority_updated = None + + def __repr__(self): + return "" % ( + self.stream_id, self.headers) + + +class _HeadersSent(Event): + __doc__ = "\n The _HeadersSent event is fired whenever headers are sent.\n\n This is an internal event, used to determine validation steps on\n outgoing header blocks.\n " + + +class _ResponseSent(_HeadersSent): + __doc__ = "\n The _ResponseSent event is fired whenever response headers are sent\n on a stream.\n\n This is an internal event, used to determine validation steps on\n outgoing header blocks.\n " + + +class _RequestSent(_HeadersSent): + __doc__ = "\n The _RequestSent event is fired whenever request headers are sent\n on a stream.\n\n This is an internal event, used to determine validation steps on\n outgoing header blocks.\n " + + +class _TrailersSent(_HeadersSent): + __doc__ = "\n The _TrailersSent event is fired whenever trailers are sent on a\n stream. Trailers are a set of headers sent after the body of the\n request/response, and are used to provide information that wasn't known\n ahead of time (e.g. content-length).\n\n This is an internal event, used to determine validation steps on\n outgoing header blocks.\n " + + +class _PushedRequestSent(_HeadersSent): + __doc__ = "\n The _PushedRequestSent event is fired whenever pushed request headers are\n sent.\n\n This is an internal event, used to determine validation steps on outgoing\n header blocks.\n " + + +class InformationalResponseReceived(Event): + __doc__ = "\n The InformationalResponseReceived event is fired when an informational\n response (that is, one whose status code is a 1XX code) is received from\n the remote peer.\n\n The remote peer may send any number of these, from zero upwards. 
These\n responses are most commonly sent in response to requests that have the\n ``expect: 100-continue`` header field present. Most users can safely\n ignore this event unless you are intending to use the\n ``expect: 100-continue`` flow, or are for any reason expecting a different\n 1XX status code.\n\n .. versionadded:: 2.2.0\n\n .. versionchanged:: 2.3.0\n Changed the type of ``headers`` to :class:`HeaderTuple\n `. This has no effect on current users.\n\n .. versionchanged:: 2.4.0\n Added ``priority_updated`` property.\n " + + def __init__(self): + self.stream_id = None + self.headers = None + self.priority_updated = None + + def __repr__(self): + return "" % ( + self.stream_id, self.headers) + + +class DataReceived(Event): + __doc__ = "\n The DataReceived event is fired whenever data is received on a stream from\n the remote peer. The event carries the data itself, and the stream ID on\n which the data was received.\n\n .. versionchanged:: 2.4.0\n Added ``stream_ended`` property.\n " + + def __init__(self): + self.stream_id = None + self.data = None + self.flow_controlled_length = None + self.stream_ended = None + + def __repr__(self): + return "" % ( + self.stream_id, + self.flow_controlled_length, + _bytes_representation(self.data[None[:20]])) + + +class WindowUpdated(Event): + __doc__ = "\n The WindowUpdated event is fired whenever a flow control window changes\n size. HTTP/2 defines flow control windows for connections and streams: this\n event fires for both connections and streams. The event carries the ID of\n the stream to which it applies (set to zero if the window update applies to\n the connection), and the delta in the window size.\n " + + def __init__(self): + self.stream_id = None + self.delta = None + + def __repr__(self): + return "" % ( + self.stream_id, self.delta) + + +class RemoteSettingsChanged(Event): + __doc__ = "\n The RemoteSettingsChanged event is fired whenever the remote peer changes\n its settings. 
It contains a complete inventory of changed settings,\n including their previous values.\n\n In HTTP/2, settings changes need to be acknowledged. hyper-h2 automatically\n acknowledges settings changes for efficiency. However, it is possible that\n the caller may not be happy with the changed setting.\n\n When this event is received, the caller should confirm that the new\n settings are acceptable. If they are not acceptable, the user should close\n the connection with the error code :data:`PROTOCOL_ERROR\n `.\n\n .. versionchanged:: 2.0.0\n Prior to this version the user needed to acknowledge settings changes.\n This is no longer the case: hyper-h2 now automatically acknowledges\n them.\n " + + def __init__(self): + self.changed_settings = {} + + @classmethod + def from_settings(cls, old_settings, new_settings): + """ + Build a RemoteSettingsChanged event from a set of changed settings. + + :param old_settings: A complete collection of old settings, in the form + of a dictionary of ``{setting: value}``. + :param new_settings: All the changed settings and their new values, in + the form of a dictionary of ``{setting: value}``. + """ + e = cls() + for setting, new_value in new_settings.items(): + setting = _setting_code_from_int(setting) + original_value = old_settings.get(setting) + change = ChangedSetting(setting, original_value, new_value) + e.changed_settings[setting] = change + + return e + + def __repr__(self): + return "" % ( + ", ".join((repr(cs) for cs in self.changed_settings.values())),) + + +class PingAcknowledged(Event): + __doc__ = "\n The PingAcknowledged event is fired whenever a user-emitted PING is\n acknowledged. 
This contains the data in the ACK'ed PING, allowing the\n user to correlate PINGs and calculate RTT.\n " + + def __init__(self): + self.ping_data = None + + def __repr__(self): + return "" % ( + _bytes_representation(self.ping_data),) + + +class StreamEnded(Event): + __doc__ = "\n The StreamEnded event is fired whenever a stream is ended by a remote\n party. The stream may not be fully closed if it has not been closed\n locally, but no further data or headers should be expected on that stream.\n " + + def __init__(self): + self.stream_id = None + + def __repr__(self): + return "" % self.stream_id + + +class StreamReset(Event): + __doc__ = "\n The StreamReset event is fired in two situations. The first is when the\n remote party forcefully resets the stream. The second is when the remote\n party has made a protocol error which only affects a single stream. In this\n case, Hyper-h2 will terminate the stream early and return this event.\n\n .. versionchanged:: 2.0.0\n This event is now fired when Hyper-h2 automatically resets a stream.\n " + + def __init__(self): + self.stream_id = None + self.error_code = None + self.remote_reset = True + + def __repr__(self): + return "" % ( + self.stream_id, self.error_code, self.remote_reset) + + +class PushedStreamReceived(Event): + __doc__ = "\n The PushedStreamReceived event is fired whenever a pushed stream has been\n received from a remote peer. The event carries on it the new stream ID, the\n ID of the parent stream, and the request headers pushed by the remote peer.\n " + + def __init__(self): + self.pushed_stream_id = None + self.parent_stream_id = None + self.headers = None + + def __repr__(self): + return "" % ( + self.pushed_stream_id, + self.parent_stream_id, + self.headers) + + +class SettingsAcknowledged(Event): + __doc__ = "\n The SettingsAcknowledged event is fired whenever a settings ACK is received\n from the remote peer. 
The event carries on it the settings that were\n acknowedged, in the same format as\n :class:`h2.events.RemoteSettingsChanged`.\n " + + def __init__(self): + self.changed_settings = {} + + def __repr__(self): + return "" % ( + ", ".join((repr(cs) for cs in self.changed_settings.values())),) + + +class PriorityUpdated(Event): + __doc__ = "\n The PriorityUpdated event is fired whenever a stream sends updated priority\n information. This can occur when the stream is opened, or at any time\n during the stream lifetime.\n\n This event is purely advisory, and does not need to be acted on.\n\n .. versionadded:: 2.0.0\n " + + def __init__(self): + self.stream_id = None + self.weight = None + self.depends_on = None + self.exclusive = None + + def __repr__(self): + return "" % ( + self.stream_id, + self.weight, + self.depends_on, + self.exclusive) + + +class ConnectionTerminated(Event): + __doc__ = "\n The ConnectionTerminated event is fired when a connection is torn down by\n the remote peer using a GOAWAY frame. Once received, no further action may\n be taken on the connection: a new connection must be established.\n " + + def __init__(self): + self.error_code = None + self.last_stream_id = None + self.additional_data = None + + def __repr__(self): + return "" % ( + self.error_code, + self.last_stream_id, + _bytes_representation(self.additional_data[None[:20]] if self.additional_data else None)) + + +class AlternativeServiceAvailable(Event): + __doc__ = '\n The AlternativeServiceAvailable event is fired when the remote peer\n advertises an `RFC 7838 `_ Alternative\n Service using an ALTSVC frame.\n\n This event always carries the origin to which the ALTSVC information\n applies. 
That origin is either supplied by the server directly, or inferred\n by hyper-h2 from the ``:authority`` pseudo-header field that was sent by\n the user when initiating a given stream.\n\n This event also carries what RFC 7838 calls the "Alternative Service Field\n Value", which is formatted like a HTTP header field and contains the\n relevant alternative service information. Hyper-h2 does not parse or in any\n way modify that information: the user is required to do that.\n\n This event can only be fired on the client end of a connection.\n\n .. versionadded:: 2.3.0\n ' + + def __init__(self): + self.origin = None + self.field_value = None + + def __repr__(self): + return "" % ( + self.origin.decode("utf-8", "ignore"), + self.field_value.decode("utf-8", "ignore")) + + +def _bytes_representation(data): + """ + Converts a bytestring into something that is safe to print on all Python + platforms. + + This function is relatively expensive, so it should not be called on the + mainline of the code. It's safe to use in things like object repr methods + though. + """ + if data is None: + return + hex = binascii.hexlify(data) + if not isinstance(hex, str): + hex = hex.decode("ascii") + return hex diff --git a/APPS_UNCOMPILED/lib/h2/exceptions.py b/APPS_UNCOMPILED/lib/h2/exceptions.py new file mode 100644 index 0000000..087814d --- /dev/null +++ b/APPS_UNCOMPILED/lib/h2/exceptions.py @@ -0,0 +1,106 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/h2/exceptions.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 5379 bytes +""" +h2/exceptions +~~~~~~~~~~~~~ + +Exceptions for the HTTP/2 module. 
+""" +import h2.errors + +class H2Error(Exception): + __doc__ = "\n The base class for all exceptions for the HTTP/2 module.\n " + + +class ProtocolError(H2Error): + __doc__ = "\n An action was attempted in violation of the HTTP/2 protocol.\n " + error_code = h2.errors.ErrorCodes.PROTOCOL_ERROR + + +class FrameTooLargeError(ProtocolError): + __doc__ = "\n The frame that we tried to send or that we received was too large.\n " + error_code = h2.errors.ErrorCodes.FRAME_SIZE_ERROR + + +class FrameDataMissingError(ProtocolError): + __doc__ = "\n The frame that we received is missing some data.\n\n .. versionadded:: 2.0.0\n " + error_code = h2.errors.ErrorCodes.FRAME_SIZE_ERROR + + +class TooManyStreamsError(ProtocolError): + __doc__ = "\n An attempt was made to open a stream that would lead to too many concurrent\n streams.\n " + + +class FlowControlError(ProtocolError): + __doc__ = "\n An attempted action violates flow control constraints.\n " + error_code = h2.errors.ErrorCodes.FLOW_CONTROL_ERROR + + +class StreamIDTooLowError(ProtocolError): + __doc__ = "\n An attempt was made to open a stream that had an ID that is lower than the\n highest ID we have seen on this connection.\n " + + def __init__(self, stream_id, max_stream_id): + self.stream_id = stream_id + self.max_stream_id = max_stream_id + + def __str__(self): + return "StreamIDTooLowError: %d is lower than %d" % ( + self.stream_id, self.max_stream_id) + + +class NoAvailableStreamIDError(ProtocolError): + __doc__ = "\n There are no available stream IDs left to the connection. All stream IDs\n have been exhausted.\n\n .. versionadded:: 2.0.0\n " + + +class NoSuchStreamError(ProtocolError): + __doc__ = "\n A stream-specific action referenced a stream that does not exist.\n\n .. 
versionchanged:: 2.0.0\n Became a subclass of :class:`ProtocolError\n `\n " + + def __init__(self, stream_id): + self.stream_id = stream_id + + +class StreamClosedError(NoSuchStreamError): + __doc__ = "\n A more specific form of\n :class:`NoSuchStreamError `. Indicates\n that the stream has since been closed, and that all state relating to that\n stream has been removed.\n " + + def __init__(self, stream_id): + self.stream_id = stream_id + self.error_code = h2.errors.ErrorCodes.STREAM_CLOSED + self._events = [] + + +class InvalidSettingsValueError(ProtocolError, ValueError): + __doc__ = "\n An attempt was made to set an invalid Settings value.\n\n .. versionadded:: 2.0.0\n " + + def __init__(self, msg, error_code): + super(InvalidSettingsValueError, self).__init__(msg) + self.error_code = error_code + + +class InvalidBodyLengthError(ProtocolError): + __doc__ = "\n The remote peer sent more or less data that the Content-Length header\n indicated.\n\n .. versionadded:: 2.0.0\n " + + def __init__(self, expected, actual): + self.expected_length = expected + self.actual_length = actual + + def __str__(self): + return "InvalidBodyLengthError: Expected %d bytes, received %d" % ( + self.expected_length, self.actual_length) + + +class UnsupportedFrameError(ProtocolError, KeyError): + __doc__ = "\n The remote peer sent a frame that is unsupported in this context.\n\n .. versionadded:: 2.1.0\n " + + +class RFC1122Error(H2Error): + __doc__ = "\n Emitted when users attempt to do something that is literally allowed by the\n relevant RFC, but is sufficiently ill-defined that it's unwise to allow\n users to actually do it.\n\n While there is some disagreement about whether or not we should be liberal\n in what accept, it is a truth universally acknowledged that we should be\n conservative in what emit.\n\n .. 
versionadded:: 2.4.0\n " + + +class DenialOfServiceError(ProtocolError): + __doc__ = "\n Emitted when the remote peer exhibits a behaviour that is likely to be an\n attempt to perform a Denial of Service attack on the implementation. This\n is a form of ProtocolError that carries a different error code, and allows\n more easy detection of this kind of behaviour.\n\n .. versionadded:: 2.5.0\n " + error_code = h2.errors.ErrorCodes.ENHANCE_YOUR_CALM diff --git a/APPS_UNCOMPILED/lib/h2/frame_buffer.py b/APPS_UNCOMPILED/lib/h2/frame_buffer.py new file mode 100644 index 0000000..b131efd --- /dev/null +++ b/APPS_UNCOMPILED/lib/h2/frame_buffer.py @@ -0,0 +1,135 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/h2/frame_buffer.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 7300 bytes +""" +h2/frame_buffer +~~~~~~~~~~~~~~~ + +A data structure that provides a way to iterate over a byte buffer in terms of +frames. +""" +from hyperframe.exceptions import UnknownFrameError, InvalidFrameError +from hyperframe.frame import Frame, HeadersFrame, ContinuationFrame, PushPromiseFrame +from .exceptions import ProtocolError, FrameTooLargeError, FrameDataMissingError +CONTINUATION_BACKLOG = 64 + +class FrameBuffer(object): + __doc__ = "\n This is a data structure that expects to act as a buffer for HTTP/2 data\n that allows iteraton in terms of H2 frames.\n " + + def __init__(self, server=False): + self.data = b'' + self.max_frame_size = 0 + self._preamble = b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n' if server else b'' + self._preamble_len = len(self._preamble) + self._headers_buffer = [] + + def add_data(self, data): + """ + Add more data to the frame buffer. + + :param data: A bytestring containing the byte buffer. 
+ """ + if self._preamble_len: + data_len = len(data) + of_which_preamble = min(self._preamble_len, data_len) + if self._preamble[None[:of_which_preamble]] != data[None[:of_which_preamble]]: + raise ProtocolError("Invalid HTTP/2 preamble.") + data = data[of_which_preamble[:None]] + self._preamble_len -= of_which_preamble + self._preamble = self._preamble[of_which_preamble[:None]] + self.data += data + + def _parse_frame_header(self, data): + """ + Parses the frame header from the data. Either returns a tuple of + (frame, length), or throws an exception. The returned frame may be None + if the frame is of unknown type. + """ + try: + frame, length = Frame.parse_frame_header(data[None[:9]]) + except UnknownFrameError as e: + try: + length = e.length + frame = None + finally: + e = None + del e + + except ValueError as e: + try: + raise ProtocolError("Invalid frame header received: %s" % str(e)) + finally: + e = None + del e + + return ( + frame, length) + + def _validate_frame_length(self, length): + """ + Confirm that the frame is an appropriate length. + """ + if length > self.max_frame_size: + raise FrameTooLargeError("Received overlong frame: length %d, max %d" % ( + length, self.max_frame_size)) + + def _update_header_buffer(self, f): + """ + Updates the internal header buffer. Returns a frame that should replace + the current one. May throw exceptions if this frame is invalid. 
+ """ + if self._headers_buffer: + stream_id = self._headers_buffer[0].stream_id + valid_frame = f is not None and isinstance(f, ContinuationFrame) and f.stream_id == stream_id + if not valid_frame: + raise ProtocolError("Invalid frame during header block.") + self._headers_buffer.append(f) + if len(self._headers_buffer) > CONTINUATION_BACKLOG: + raise ProtocolError("Too many continuation frames received.") + if "END_HEADERS" in f.flags: + f = self._headers_buffer[0] + f.flags.add("END_HEADERS") + f.data = (b'').join((x.data for x in self._headers_buffer)) + self._headers_buffer = [] + else: + f = None + else: + if isinstance(f, (HeadersFrame, PushPromiseFrame)): + if "END_HEADERS" not in f.flags: + self._headers_buffer.append(f) + f = None + return f + + def __iter__(self): + return self + + def next(self): + if len(self.data) < 9: + raise StopIteration() + else: + try: + f, length = self._parse_frame_header(self.data) + except InvalidFrameError: + raise ProtocolError("Received frame with invalid frame header.") + + if len(self.data) < length + 9: + raise StopIteration() + self._validate_frame_length(length) + if f is not None: + try: + f.parse_body(memoryview(self.data[9[:9 + length]])) + except InvalidFrameError: + raise FrameDataMissingError("Frame data missing or invalid") + + self.data = self.data[(9 + length)[:None]] + f = self._update_header_buffer(f) + if f is not None: + return f + return self.next() + + def __next__(self): + return self.next() diff --git a/APPS_UNCOMPILED/lib/h2/settings.py b/APPS_UNCOMPILED/lib/h2/settings.py new file mode 100644 index 0000000..aa89f13 --- /dev/null +++ b/APPS_UNCOMPILED/lib/h2/settings.py @@ -0,0 +1,237 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/h2/settings.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 
13412 bytes +""" +h2/settings +~~~~~~~~~~~ + +This module contains a HTTP/2 settings object. This object provides a simple +API for manipulating HTTP/2 settings, keeping track of both the current active +state of the settings and the unacknowledged future values of the settings. +""" +import collections, enum +from hyperframe.frame import SettingsFrame +from h2.errors import ErrorCodes +from h2.exceptions import InvalidSettingsValueError + +class SettingCodes(enum.IntEnum): + __doc__ = "\n All known HTTP/2 setting codes.\n\n .. versionadded:: 2.6.0\n " + HEADER_TABLE_SIZE = SettingsFrame.HEADER_TABLE_SIZE + ENABLE_PUSH = SettingsFrame.ENABLE_PUSH + MAX_CONCURRENT_STREAMS = SettingsFrame.MAX_CONCURRENT_STREAMS + INITIAL_WINDOW_SIZE = SettingsFrame.INITIAL_WINDOW_SIZE + try: + _max_frame_size = SettingsFrame.SETTINGS_MAX_FRAME_SIZE + except AttributeError: + _max_frame_size = SettingsFrame.MAX_FRAME_SIZE + + MAX_FRAME_SIZE = _max_frame_size + try: + _max_header_list_size = SettingsFrame.SETTINGS_MAX_HEADER_LIST_SIZE + except AttributeError: + _max_header_list_size = SettingsFrame.MAX_HEADER_LIST_SIZE + + MAX_HEADER_LIST_SIZE = _max_header_list_size + + +def _setting_code_from_int(code): + """ + Given an integer setting code, returns either one of :class:`SettingCodes + ` or, if not present in the known set of codes, + returns the integer directly. 
+ """ + try: + return SettingCodes(code) + except ValueError: + return code + + +HEADER_TABLE_SIZE = SettingCodes.HEADER_TABLE_SIZE +ENABLE_PUSH = SettingCodes.ENABLE_PUSH +MAX_CONCURRENT_STREAMS = SettingCodes.MAX_CONCURRENT_STREAMS +INITIAL_WINDOW_SIZE = SettingCodes.INITIAL_WINDOW_SIZE +MAX_FRAME_SIZE = SettingCodes.MAX_FRAME_SIZE +MAX_HEADER_LIST_SIZE = SettingCodes.MAX_HEADER_LIST_SIZE + +class ChangedSetting: + + def __init__(self, setting, original_value, new_value): + self.setting = setting + self.original_value = original_value + self.new_value = new_value + + def __repr__(self): + return "ChangedSetting(setting=%s, original_value=%s, new_value=%s)" % ( + self.setting, + self.original_value, + self.new_value) + + +class Settings(collections.MutableMapping): + __doc__ = '\n An object that encapsulates HTTP/2 settings state.\n\n HTTP/2 Settings are a complex beast. Each party, remote and local, has its\n own settings and a view of the other party\'s settings. When a settings\n frame is emitted by a peer it cannot assume that the new settings values\n are in place until the remote peer acknowledges the setting. In principle,\n multiple settings changes can be "in flight" at the same time, all with\n different values.\n\n This object encapsulates this mess. It provides a dict-like interface to\n settings, which return the *current* values of the settings in question.\n Additionally, it keeps track of the stack of proposed values: each time an\n acknowledgement is sent/received, it updates the current values with the\n stack of proposed values. On top of all that, it validates the values to\n make sure they\'re allowed, and raises :class:`InvalidSettingsValueError\n ` if they are not.\n\n Finally, this object understands what the default values of the HTTP/2\n settings are, and sets those defaults appropriately.\n\n .. versionchanged:: 2.2.0\n Added the ``initial_values`` parameter.\n\n .. 
versionchanged:: 2.5.0\n Added the ``max_header_list_size`` property.\n\n :param client: (optional) Whether these settings should be defaulted for a\n client implementation or a server implementation. Defaults to ``True``.\n :type client: ``bool``\n :param initial_values: (optional) Any initial values the user would like\n set, rather than RFC 7540\'s defaults.\n :type initial_vales: ``MutableMapping``\n ' + + def __init__(self, client=True, initial_values=None): + self._settings = {(SettingCodes.HEADER_TABLE_SIZE): (collections.deque([4096])), + (SettingCodes.ENABLE_PUSH): (collections.deque([int(client)])), + (SettingCodes.INITIAL_WINDOW_SIZE): (collections.deque([65535])), + (SettingCodes.MAX_FRAME_SIZE): (collections.deque([16384]))} + if initial_values is not None: + for key, value in initial_values.items(): + invalid = _validate_setting(key, value) + if invalid: + raise InvalidSettingsValueError(("Setting %d has invalid value %d" % (key, value)), + error_code=invalid) + self._settings[key] = collections.deque([value]) + + def acknowledge(self): + """ + The settings have been acknowledged, either by the user (remote + settings) or by the remote peer (local settings). + + :returns: A dict of {setting: ChangedSetting} that were applied. + """ + changed_settings = {} + for k, v in self._settings.items(): + if len(v) > 1: + old_setting = v.popleft() + new_setting = v[0] + changed_settings[k] = ChangedSetting(k, old_setting, new_setting) + + return changed_settings + + @property + def header_table_size(self): + """ + The current value of the :data:`HEADER_TABLE_SIZE + ` setting. + """ + return self[SettingCodes.HEADER_TABLE_SIZE] + + @header_table_size.setter + def header_table_size(self, value): + self[SettingCodes.HEADER_TABLE_SIZE] = value + + @property + def enable_push(self): + """ + The current value of the :data:`ENABLE_PUSH + ` setting. 
+ """ + return self[SettingCodes.ENABLE_PUSH] + + @enable_push.setter + def enable_push(self, value): + self[SettingCodes.ENABLE_PUSH] = value + + @property + def initial_window_size(self): + """ + The current value of the :data:`INITIAL_WINDOW_SIZE + ` setting. + """ + return self[SettingCodes.INITIAL_WINDOW_SIZE] + + @initial_window_size.setter + def initial_window_size(self, value): + self[SettingCodes.INITIAL_WINDOW_SIZE] = value + + @property + def max_frame_size(self): + """ + The current value of the :data:`MAX_FRAME_SIZE + ` setting. + """ + return self[SettingCodes.MAX_FRAME_SIZE] + + @max_frame_size.setter + def max_frame_size(self, value): + self[SettingCodes.MAX_FRAME_SIZE] = value + + @property + def max_concurrent_streams(self): + """ + The current value of the :data:`MAX_CONCURRENT_STREAMS + ` setting. + """ + return self.get(SettingCodes.MAX_CONCURRENT_STREAMS, 4294967297L) + + @max_concurrent_streams.setter + def max_concurrent_streams(self, value): + self[SettingCodes.MAX_CONCURRENT_STREAMS] = value + + @property + def max_header_list_size(self): + """ + The current value of the :data:`MAX_HEADER_LIST_SIZE + ` setting. If not set, + returns ``None``, which means unlimited. + + .. 
versionadded:: 2.5.0 + """ + return self.get(SettingCodes.MAX_HEADER_LIST_SIZE, None) + + @max_header_list_size.setter + def max_header_list_size(self, value): + self[SettingCodes.MAX_HEADER_LIST_SIZE] = value + + def __getitem__(self, key): + val = self._settings[key][0] + if val is None: + raise KeyError + return val + + def __setitem__(self, key, value): + invalid = _validate_setting(key, value) + if invalid: + raise InvalidSettingsValueError(("Setting %d has invalid value %d" % (key, value)), + error_code=invalid) + try: + items = self._settings[key] + except KeyError: + items = collections.deque([None]) + self._settings[key] = items + + items.append(value) + + def __delitem__(self, key): + del self._settings[key] + + def __iter__(self): + return self._settings.__iter__() + + def __len__(self): + return len(self._settings) + + def __eq__(self, other): + if isinstance(other, Settings): + return self._settings == other._settings + return NotImplemented + + def __ne__(self, other): + if isinstance(other, Settings): + return not self == other + return NotImplemented + + +def _validate_setting(setting, value): + """ + Confirms that a specific setting has a well-formed value. If the setting is + invalid, returns an error code. Otherwise, returns 0 (NO_ERROR). 
+ """ + if setting == SettingCodes.ENABLE_PUSH: + if value not in (0, 1): + return ErrorCodes.PROTOCOL_ERROR + elif setting == SettingCodes.INITIAL_WINDOW_SIZE: + if not 0 <= value <= 2147483647: + return ErrorCodes.FLOW_CONTROL_ERROR + elif setting == SettingCodes.MAX_FRAME_SIZE: + if not 16384 <= value <= 16777215: + return ErrorCodes.PROTOCOL_ERROR + elif setting == SettingCodes.MAX_HEADER_LIST_SIZE: + if value < 0: + return ErrorCodes.PROTOCOL_ERROR + return 0 diff --git a/APPS_UNCOMPILED/lib/h2/stream.py b/APPS_UNCOMPILED/lib/h2/stream.py new file mode 100644 index 0000000..26de7dc --- /dev/null +++ b/APPS_UNCOMPILED/lib/h2/stream.py @@ -0,0 +1,1196 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/h2/stream.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 55737 bytes +""" +h2/stream +~~~~~~~~~ + +An implementation of a HTTP/2 stream. 
+""" +import warnings +from enum import Enum, IntEnum +from hpack import HeaderTuple +from hyperframe.frame import HeadersFrame, ContinuationFrame, DataFrame, WindowUpdateFrame, RstStreamFrame, PushPromiseFrame, AltSvcFrame +from .errors import ErrorCodes, _error_code_from_int +from .events import RequestReceived, ResponseReceived, DataReceived, WindowUpdated, StreamEnded, PushedStreamReceived, StreamReset, TrailersReceived, InformationalResponseReceived, AlternativeServiceAvailable, _ResponseSent, _RequestSent, _TrailersSent, _PushedRequestSent +from .exceptions import ProtocolError, StreamClosedError, InvalidBodyLengthError, FlowControlError +from .utilities import guard_increment_window, is_informational_response, authority_from_headers, validate_headers, validate_outbound_headers, normalize_outbound_headers, HeaderValidationFlags, extract_method_header +from .windows import WindowManager + +class StreamState(IntEnum): + IDLE = 0 + RESERVED_REMOTE = 1 + RESERVED_LOCAL = 2 + OPEN = 3 + HALF_CLOSED_REMOTE = 4 + HALF_CLOSED_LOCAL = 5 + CLOSED = 6 + + +class StreamInputs(Enum): + SEND_HEADERS = 0 + SEND_PUSH_PROMISE = 1 + SEND_RST_STREAM = 2 + SEND_DATA = 3 + SEND_WINDOW_UPDATE = 4 + SEND_END_STREAM = 5 + RECV_HEADERS = 6 + RECV_PUSH_PROMISE = 7 + RECV_RST_STREAM = 8 + RECV_DATA = 9 + RECV_WINDOW_UPDATE = 10 + RECV_END_STREAM = 11 + RECV_CONTINUATION = 12 + SEND_INFORMATIONAL_HEADERS = 13 + RECV_INFORMATIONAL_HEADERS = 14 + SEND_ALTERNATIVE_SERVICE = 15 + RECV_ALTERNATIVE_SERVICE = 16 + UPGRADE_CLIENT = 17 + UPGRADE_SERVER = 18 + + +class StreamClosedBy(Enum): + SEND_END_STREAM = 0 + RECV_END_STREAM = 1 + SEND_RST_STREAM = 2 + RECV_RST_STREAM = 3 + + +STREAM_OPEN = [False for _ in range(0, len(StreamState))] +STREAM_OPEN[StreamState.OPEN] = True +STREAM_OPEN[StreamState.HALF_CLOSED_LOCAL] = True +STREAM_OPEN[StreamState.HALF_CLOSED_REMOTE] = True + +class H2StreamStateMachine(object): + __doc__ = "\n A single HTTP/2 stream state machine.\n\n This stream object 
implements basically the state machine described in\n RFC 7540 section 5.1.\n\n :param stream_id: The stream ID of this stream. This is stored primarily\n for logging purposes.\n " + + def __init__(self, stream_id): + self.state = StreamState.IDLE + self.stream_id = stream_id + self.client = None + self.headers_sent = None + self.trailers_sent = None + self.headers_received = None + self.trailers_received = None + self.stream_closed_by = None + + def process_input(self, input_): + """ + Process a specific input in the state machine. + """ + if not isinstance(input_, StreamInputs): + raise ValueError("Input must be an instance of StreamInputs") + else: + try: + func, target_state = _transitions[(self.state, input_)] + except KeyError: + old_state = self.state + self.state = StreamState.CLOSED + raise ProtocolError("Invalid input %s in state %s" % (input_, old_state)) + else: + previous_state = self.state + self.state = target_state + if func is not None: + try: + return func(self, previous_state) + except ProtocolError: + self.state = StreamState.CLOSED + raise + except AssertionError as e: + try: + self.state = StreamState.CLOSED + raise ProtocolError(e) + finally: + e = None + del e + + return [] + + def request_sent(self, previous_state): + """ + Fires when a request is sent. + """ + self.client = True + self.headers_sent = True + event = _RequestSent() + return [ + event] + + def response_sent(self, previous_state): + """ + Fires when something that should be a response is sent. This 'response' + may actually be trailers. + """ + if not (self.headers_sent or self.client is True): + if self.client is None: + raise ProtocolError("Client cannot send responses.") + self.headers_sent = True + event = _ResponseSent() + elif self.trailers_sent: + raise AssertionError + self.trailers_sent = True + event = _TrailersSent() + return [ + event] + + def request_received(self, previous_state): + """ + Fires when a request is received. 
+ """ + assert not self.headers_received + assert not self.trailers_received + self.client = False + self.headers_received = True + event = RequestReceived() + event.stream_id = self.stream_id + return [event] + + def response_received(self, previous_state): + """ + Fires when a response is received. Also disambiguates between responses + and trailers. + """ + if not self.headers_received: + assert self.client is True + self.headers_received = True + event = ResponseReceived() + else: + assert not self.trailers_received + self.trailers_received = True + event = TrailersReceived() + event.stream_id = self.stream_id + return [event] + + def data_received(self, previous_state): + """ + Fires when data is received. + """ + event = DataReceived() + event.stream_id = self.stream_id + return [event] + + def window_updated(self, previous_state): + """ + Fires when a window update frame is received. + """ + event = WindowUpdated() + event.stream_id = self.stream_id + return [event] + + def stream_half_closed(self, previous_state): + """ + Fires when an END_STREAM flag is received in the OPEN state, + transitioning this stream to a HALF_CLOSED_REMOTE state. + """ + event = StreamEnded() + event.stream_id = self.stream_id + return [event] + + def stream_ended(self, previous_state): + """ + Fires when a stream is cleanly ended. + """ + self.stream_closed_by = StreamClosedBy.RECV_END_STREAM + event = StreamEnded() + event.stream_id = self.stream_id + return [event] + + def stream_reset(self, previous_state): + """ + Fired when a stream is forcefully reset. + """ + self.stream_closed_by = StreamClosedBy.RECV_RST_STREAM + event = StreamReset() + event.stream_id = self.stream_id + return [event] + + def send_new_pushed_stream(self, previous_state): + """ + Fires on the newly pushed stream, when pushed by the local peer. + + No event here, but definitionally this peer must be a server. 
+ """ + assert self.client is None + self.client = False + self.headers_received = True + return [] + + def recv_new_pushed_stream(self, previous_state): + """ + Fires on the newly pushed stream, when pushed by the remote peer. + + No event here, but definitionally this peer must be a client. + """ + assert self.client is None + self.client = True + self.headers_sent = True + return [] + + def send_push_promise(self, previous_state): + """ + Fires on the already-existing stream when a PUSH_PROMISE frame is sent. + We may only send PUSH_PROMISE frames if we're a server. + """ + if self.client is True: + raise ProtocolError("Cannot push streams from client peers.") + event = _PushedRequestSent() + return [event] + + def recv_push_promise(self, previous_state): + """ + Fires on the already-existing stream when a PUSH_PROMISE frame is + received. We may only receive PUSH_PROMISE frames if we're a client. + + Fires a PushedStreamReceived event. + """ + if not self.client: + if self.client is None: + msg = "Idle streams cannot receive pushes" + else: + msg = "Cannot receive pushed streams as a server" + raise ProtocolError(msg) + event = PushedStreamReceived() + event.parent_stream_id = self.stream_id + return [event] + + def send_end_stream(self, previous_state): + """ + Called when an attempt is made to send END_STREAM in the + HALF_CLOSED_REMOTE state. + """ + self.stream_closed_by = StreamClosedBy.SEND_END_STREAM + + def send_reset_stream(self, previous_state): + """ + Called when an attempt is made to send RST_STREAM in a non-closed + stream state. + """ + self.stream_closed_by = StreamClosedBy.SEND_RST_STREAM + + def reset_stream_on_error(self, previous_state): + """ + Called when we need to forcefully emit another RST_STREAM frame on + behalf of the state machine. + + If this is the first time we've done this, we should also hang an event + off the StreamClosedError so that the user can be informed. 
We know + it's the first time we've done this if the stream is currently in a + state other than CLOSED. + """ + self.stream_closed_by = StreamClosedBy.SEND_RST_STREAM + error = StreamClosedError(self.stream_id) + event = StreamReset() + event.stream_id = self.stream_id + event.error_code = ErrorCodes.STREAM_CLOSED + event.remote_reset = False + error._events = [event] + raise error + + def recv_on_closed_stream(self, previous_state): + """ + Called when an unexpected frame is received on an already-closed + stream. + + An endpoint that receives an unexpected frame should treat it as + a stream error or connection error with type STREAM_CLOSED, depending + on the specific frame. The error handling is done at a higher level: + this just raises the appropriate error. + """ + raise StreamClosedError(self.stream_id) + + def send_on_closed_stream(self, previous_state): + """ + Called when an attempt is made to send data on an already-closed + stream. + + This essentially overrides the standard logic by throwing a + more-specific error: StreamClosedError. This is a ProtocolError, so it + matches the standard API of the state machine, but provides more detail + to the user. + """ + raise StreamClosedError(self.stream_id) + + def recv_push_on_closed_stream(self, previous_state): + """ + Called when a PUSH_PROMISE frame is received on a full stop + stream. + + If the stream was closed by us sending a RST_STREAM frame, then we + presume that the PUSH_PROMISE was in flight when we reset the parent + stream. Rathen than accept the new stream, we just reset it. + Otherwise, we should call this a PROTOCOL_ERROR: pushing a stream on a + naturally closed stream is a real problem because it creates a brand + new stream that the remote peer now believes exists. 
+ """ + if not self.stream_closed_by is not None: + raise AssertionError + elif self.stream_closed_by == StreamClosedBy.SEND_RST_STREAM: + raise StreamClosedError(self.stream_id) + else: + raise ProtocolError("Attempted to push on closed stream.") + + def send_push_on_closed_stream(self, previous_state): + """ + Called when an attempt is made to push on an already-closed stream. + + This essentially overrides the standard logic by providing a more + useful error message. It's necessary because simply indicating that the + stream is closed is not enough: there is now a new stream that is not + allowed to be there. The only recourse is to tear the whole connection + down. + """ + raise ProtocolError("Attempted to push on closed stream.") + + def window_on_closed_stream(self, previous_state): + """ + Called when a WINDOW_UPDATE frame is received on an already-closed + stream. + + If we sent an END_STREAM frame, we just ignore the frame, as instructed + in RFC 7540 Section 5.1. Technically we should eventually consider + WINDOW_UPDATE in this state an error, but we don't have access to a + clock so we just always allow it. If we closed the stream for any other + reason, we behave as we do for receiving any other frame on a closed + stream. + """ + assert self.stream_closed_by is not None + if self.stream_closed_by == StreamClosedBy.SEND_END_STREAM: + return [] + return self.recv_on_closed_stream(previous_state) + + def reset_on_closed_stream(self, previous_state): + """ + Called when a RST_STREAM frame is received on an already-closed stream. + + If we sent an END_STREAM frame, we just ignore the frame, as instructed + in RFC 7540 Section 5.1. Technically we should eventually consider + RST_STREAM in this state an error, but we don't have access to a clock + so we just always allow it. If we closed the stream for any other + reason, we behave as we do for receiving any other frame on a closed + stream. 
+ """ + assert self.stream_closed_by is not None + if self.stream_closed_by is StreamClosedBy.SEND_END_STREAM: + return [] + return self.recv_on_closed_stream(previous_state) + + def send_informational_response(self, previous_state): + """ + Called when an informational header block is sent (that is, a block + where the :status header has a 1XX value). + + Only enforces that these are sent *before* final headers are sent. + """ + if self.headers_sent: + raise ProtocolError("Information response after final response") + event = _ResponseSent() + return [event] + + def recv_informational_response(self, previous_state): + """ + Called when an informational header block is received (that is, a block + where the :status header has a 1XX value). + """ + if self.headers_received: + raise ProtocolError("Informational response after final response") + event = InformationalResponseReceived() + event.stream_id = self.stream_id + return [event] + + def recv_alt_svc(self, previous_state): + """ + Called when receiving an ALTSVC frame. + + RFC 7838 allows us to receive ALTSVC frames at any stream state, which + is really absurdly overzealous. For that reason, we want to limit the + states in which we can actually receive it. It's really only sensible + to receive it after we've sent our own headers and before the server + has sent its header block: the server can't guarantee that we have any + state around after it completes its header block, and the server + doesn't know what origin we're talking about before we've sent ours. + + For that reason, this function applies a few extra checks on both state + and some of the little state variables we keep around. If those suggest + an unreasonable situation for the ALTSVC frame to have been sent in, + we quietly ignore it (as RFC 7838 suggests). + + This function is also *not* always called by the state machine. 
In some + states (IDLE, RESERVED_LOCAL, CLOSED) we don't bother to call it, + because we know the frame cannot be valid in that state (IDLE because + the server cannot know what origin the stream applies to, CLOSED + because the server cannot assume we still have state around, + RESERVED_LOCAL because by definition if we're in the RESERVED_LOCAL + state then *we* are the server). + """ + if self.client is False: + return [] + if self.headers_received: + return [] + return [ + AlternativeServiceAvailable()] + + def send_alt_svc(self, previous_state): + """ + Called when sending an ALTSVC frame on this stream. + + For consistency with the restrictions we apply on receiving ALTSVC + frames in ``recv_alt_svc``, we want to restrict when users can send + ALTSVC frames to the situations when we ourselves would accept them. + + That means: when we are a server, when we have received the request + headers, and when we have not yet sent our own response headers. + """ + if self.headers_sent: + raise ProtocolError("Cannot send ALTSVC after sending response headers.") + + +_transitions = {( + StreamState.IDLE, StreamInputs.SEND_HEADERS): ( + H2StreamStateMachine.request_sent, StreamState.OPEN), + + ( + StreamState.IDLE, StreamInputs.RECV_HEADERS): ( + H2StreamStateMachine.request_received, StreamState.OPEN), + + ( + StreamState.IDLE, StreamInputs.RECV_DATA): ( + H2StreamStateMachine.reset_stream_on_error, StreamState.CLOSED), + + ( + StreamState.IDLE, StreamInputs.SEND_PUSH_PROMISE): ( + H2StreamStateMachine.send_new_pushed_stream, + StreamState.RESERVED_LOCAL), + + ( + StreamState.IDLE, StreamInputs.RECV_PUSH_PROMISE): ( + H2StreamStateMachine.recv_new_pushed_stream, + StreamState.RESERVED_REMOTE), + + ( + StreamState.IDLE, StreamInputs.RECV_ALTERNATIVE_SERVICE): ( + None, StreamState.IDLE), + + ( + StreamState.IDLE, StreamInputs.UPGRADE_CLIENT): ( + H2StreamStateMachine.request_sent, StreamState.HALF_CLOSED_LOCAL), + + ( + StreamState.IDLE, StreamInputs.UPGRADE_SERVER): ( + 
H2StreamStateMachine.request_received, + StreamState.HALF_CLOSED_REMOTE), + + ( + StreamState.RESERVED_LOCAL, StreamInputs.SEND_HEADERS): ( + H2StreamStateMachine.response_sent, StreamState.HALF_CLOSED_REMOTE), + + ( + StreamState.RESERVED_LOCAL, StreamInputs.RECV_DATA): ( + H2StreamStateMachine.reset_stream_on_error, StreamState.CLOSED), + + ( + StreamState.RESERVED_LOCAL, StreamInputs.SEND_WINDOW_UPDATE): ( + None, StreamState.RESERVED_LOCAL), + + ( + StreamState.RESERVED_LOCAL, StreamInputs.RECV_WINDOW_UPDATE): ( + H2StreamStateMachine.window_updated, StreamState.RESERVED_LOCAL), + + ( + StreamState.RESERVED_LOCAL, StreamInputs.SEND_RST_STREAM): ( + H2StreamStateMachine.send_reset_stream, StreamState.CLOSED), + + ( + StreamState.RESERVED_LOCAL, StreamInputs.RECV_RST_STREAM): ( + H2StreamStateMachine.stream_reset, StreamState.CLOSED), + + ( + StreamState.RESERVED_LOCAL, StreamInputs.SEND_ALTERNATIVE_SERVICE): ( + H2StreamStateMachine.send_alt_svc, StreamState.RESERVED_LOCAL), + + ( + StreamState.RESERVED_LOCAL, StreamInputs.RECV_ALTERNATIVE_SERVICE): ( + None, StreamState.RESERVED_LOCAL), + + ( + StreamState.RESERVED_REMOTE, StreamInputs.RECV_HEADERS): ( + H2StreamStateMachine.response_received, + StreamState.HALF_CLOSED_LOCAL), + + ( + StreamState.RESERVED_REMOTE, StreamInputs.RECV_DATA): ( + H2StreamStateMachine.reset_stream_on_error, StreamState.CLOSED), + + ( + StreamState.RESERVED_REMOTE, StreamInputs.SEND_WINDOW_UPDATE): ( + None, StreamState.RESERVED_REMOTE), + + ( + StreamState.RESERVED_REMOTE, StreamInputs.RECV_WINDOW_UPDATE): ( + H2StreamStateMachine.window_updated, StreamState.RESERVED_REMOTE), + + ( + StreamState.RESERVED_REMOTE, StreamInputs.SEND_RST_STREAM): ( + H2StreamStateMachine.send_reset_stream, StreamState.CLOSED), + + ( + StreamState.RESERVED_REMOTE, StreamInputs.RECV_RST_STREAM): ( + H2StreamStateMachine.stream_reset, StreamState.CLOSED), + + ( + StreamState.RESERVED_REMOTE, StreamInputs.RECV_ALTERNATIVE_SERVICE): ( + 
H2StreamStateMachine.recv_alt_svc, StreamState.RESERVED_REMOTE), + + ( + StreamState.OPEN, StreamInputs.SEND_HEADERS): ( + H2StreamStateMachine.response_sent, StreamState.OPEN), + + ( + StreamState.OPEN, StreamInputs.RECV_HEADERS): ( + H2StreamStateMachine.response_received, StreamState.OPEN), + + ( + StreamState.OPEN, StreamInputs.SEND_DATA): ( + None, StreamState.OPEN), + + ( + StreamState.OPEN, StreamInputs.RECV_DATA): ( + H2StreamStateMachine.data_received, StreamState.OPEN), + + ( + StreamState.OPEN, StreamInputs.SEND_END_STREAM): ( + None, StreamState.HALF_CLOSED_LOCAL), + + ( + StreamState.OPEN, StreamInputs.RECV_END_STREAM): ( + H2StreamStateMachine.stream_half_closed, + StreamState.HALF_CLOSED_REMOTE), + + ( + StreamState.OPEN, StreamInputs.SEND_WINDOW_UPDATE): ( + None, StreamState.OPEN), + + ( + StreamState.OPEN, StreamInputs.RECV_WINDOW_UPDATE): ( + H2StreamStateMachine.window_updated, StreamState.OPEN), + + ( + StreamState.OPEN, StreamInputs.SEND_RST_STREAM): ( + H2StreamStateMachine.send_reset_stream, StreamState.CLOSED), + + ( + StreamState.OPEN, StreamInputs.RECV_RST_STREAM): ( + H2StreamStateMachine.stream_reset, StreamState.CLOSED), + + ( + StreamState.OPEN, StreamInputs.SEND_PUSH_PROMISE): ( + H2StreamStateMachine.send_push_promise, StreamState.OPEN), + + ( + StreamState.OPEN, StreamInputs.RECV_PUSH_PROMISE): ( + H2StreamStateMachine.recv_push_promise, StreamState.OPEN), + + ( + StreamState.OPEN, StreamInputs.SEND_INFORMATIONAL_HEADERS): ( + H2StreamStateMachine.send_informational_response, StreamState.OPEN), + + ( + StreamState.OPEN, StreamInputs.RECV_INFORMATIONAL_HEADERS): ( + H2StreamStateMachine.recv_informational_response, StreamState.OPEN), + + ( + StreamState.OPEN, StreamInputs.SEND_ALTERNATIVE_SERVICE): ( + H2StreamStateMachine.send_alt_svc, StreamState.OPEN), + + ( + StreamState.OPEN, StreamInputs.RECV_ALTERNATIVE_SERVICE): ( + H2StreamStateMachine.recv_alt_svc, StreamState.OPEN), + + ( + StreamState.HALF_CLOSED_REMOTE, 
StreamInputs.SEND_HEADERS): ( + H2StreamStateMachine.response_sent, StreamState.HALF_CLOSED_REMOTE), + + ( + StreamState.HALF_CLOSED_REMOTE, StreamInputs.RECV_HEADERS): ( + H2StreamStateMachine.reset_stream_on_error, StreamState.CLOSED), + + ( + StreamState.HALF_CLOSED_REMOTE, StreamInputs.SEND_DATA): ( + None, StreamState.HALF_CLOSED_REMOTE), + + ( + StreamState.HALF_CLOSED_REMOTE, StreamInputs.RECV_DATA): ( + H2StreamStateMachine.reset_stream_on_error, StreamState.CLOSED), + + ( + StreamState.HALF_CLOSED_REMOTE, StreamInputs.SEND_END_STREAM): ( + H2StreamStateMachine.send_end_stream, StreamState.CLOSED), + + ( + StreamState.HALF_CLOSED_REMOTE, StreamInputs.SEND_WINDOW_UPDATE): ( + None, StreamState.HALF_CLOSED_REMOTE), + + ( + StreamState.HALF_CLOSED_REMOTE, StreamInputs.RECV_WINDOW_UPDATE): ( + H2StreamStateMachine.window_updated, StreamState.HALF_CLOSED_REMOTE), + + ( + StreamState.HALF_CLOSED_REMOTE, StreamInputs.SEND_RST_STREAM): ( + H2StreamStateMachine.send_reset_stream, StreamState.CLOSED), + + ( + StreamState.HALF_CLOSED_REMOTE, StreamInputs.RECV_RST_STREAM): ( + H2StreamStateMachine.stream_reset, StreamState.CLOSED), + + ( + StreamState.HALF_CLOSED_REMOTE, StreamInputs.SEND_PUSH_PROMISE): ( + H2StreamStateMachine.send_push_promise, + StreamState.HALF_CLOSED_REMOTE), + + ( + StreamState.HALF_CLOSED_REMOTE, StreamInputs.RECV_PUSH_PROMISE): ( + H2StreamStateMachine.reset_stream_on_error, StreamState.CLOSED), + + ( + StreamState.HALF_CLOSED_REMOTE, StreamInputs.SEND_INFORMATIONAL_HEADERS): ( + H2StreamStateMachine.send_informational_response, + StreamState.HALF_CLOSED_REMOTE), + + ( + StreamState.HALF_CLOSED_REMOTE, StreamInputs.SEND_ALTERNATIVE_SERVICE): ( + H2StreamStateMachine.send_alt_svc, StreamState.HALF_CLOSED_REMOTE), + + ( + StreamState.HALF_CLOSED_REMOTE, StreamInputs.RECV_ALTERNATIVE_SERVICE): ( + H2StreamStateMachine.recv_alt_svc, StreamState.HALF_CLOSED_REMOTE), + + ( + StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_HEADERS): ( + 
H2StreamStateMachine.response_received, + StreamState.HALF_CLOSED_LOCAL), + + ( + StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_DATA): ( + H2StreamStateMachine.data_received, StreamState.HALF_CLOSED_LOCAL), + + ( + StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_END_STREAM): ( + H2StreamStateMachine.stream_ended, StreamState.CLOSED), + + ( + StreamState.HALF_CLOSED_LOCAL, StreamInputs.SEND_WINDOW_UPDATE): ( + None, StreamState.HALF_CLOSED_LOCAL), + + ( + StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_WINDOW_UPDATE): ( + H2StreamStateMachine.window_updated, StreamState.HALF_CLOSED_LOCAL), + + ( + StreamState.HALF_CLOSED_LOCAL, StreamInputs.SEND_RST_STREAM): ( + H2StreamStateMachine.send_reset_stream, StreamState.CLOSED), + + ( + StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_RST_STREAM): ( + H2StreamStateMachine.stream_reset, StreamState.CLOSED), + + ( + StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_PUSH_PROMISE): ( + H2StreamStateMachine.recv_push_promise, + StreamState.HALF_CLOSED_LOCAL), + + ( + StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_INFORMATIONAL_HEADERS): ( + H2StreamStateMachine.recv_informational_response, + StreamState.HALF_CLOSED_LOCAL), + + ( + StreamState.HALF_CLOSED_LOCAL, StreamInputs.SEND_ALTERNATIVE_SERVICE): ( + H2StreamStateMachine.send_alt_svc, StreamState.HALF_CLOSED_LOCAL), + + ( + StreamState.HALF_CLOSED_LOCAL, StreamInputs.RECV_ALTERNATIVE_SERVICE): ( + H2StreamStateMachine.recv_alt_svc, StreamState.HALF_CLOSED_LOCAL), + + ( + StreamState.CLOSED, StreamInputs.RECV_END_STREAM): ( + None, StreamState.CLOSED), + + ( + StreamState.CLOSED, StreamInputs.RECV_ALTERNATIVE_SERVICE): ( + None, StreamState.CLOSED), + + ( + StreamState.CLOSED, StreamInputs.RECV_HEADERS): ( + H2StreamStateMachine.recv_on_closed_stream, StreamState.CLOSED), + + ( + StreamState.CLOSED, StreamInputs.RECV_DATA): ( + H2StreamStateMachine.recv_on_closed_stream, StreamState.CLOSED), + + ( + StreamState.CLOSED, StreamInputs.RECV_WINDOW_UPDATE): ( + 
H2StreamStateMachine.window_on_closed_stream, StreamState.CLOSED), + + ( + StreamState.CLOSED, StreamInputs.RECV_RST_STREAM): ( + H2StreamStateMachine.reset_on_closed_stream, StreamState.CLOSED), + + ( + StreamState.CLOSED, StreamInputs.RECV_PUSH_PROMISE): ( + H2StreamStateMachine.recv_push_on_closed_stream, StreamState.CLOSED), + + ( + StreamState.CLOSED, StreamInputs.SEND_HEADERS): ( + H2StreamStateMachine.send_on_closed_stream, StreamState.CLOSED), + + ( + StreamState.CLOSED, StreamInputs.SEND_PUSH_PROMISE): ( + H2StreamStateMachine.send_push_on_closed_stream, StreamState.CLOSED), + + ( + StreamState.CLOSED, StreamInputs.SEND_RST_STREAM): ( + H2StreamStateMachine.send_on_closed_stream, StreamState.CLOSED), + + ( + StreamState.CLOSED, StreamInputs.SEND_DATA): ( + H2StreamStateMachine.send_on_closed_stream, StreamState.CLOSED), + + ( + StreamState.CLOSED, StreamInputs.SEND_WINDOW_UPDATE): ( + H2StreamStateMachine.send_on_closed_stream, StreamState.CLOSED), + + ( + StreamState.CLOSED, StreamInputs.SEND_END_STREAM): ( + H2StreamStateMachine.send_on_closed_stream, StreamState.CLOSED)} + +class H2Stream(object): + __doc__ = "\n A low-level HTTP/2 stream object. 
This handles building and receiving\n frames and maintains per-stream state.\n\n This wraps a HTTP/2 Stream state machine implementation, ensuring that\n frames can only be sent/received when the stream is in a valid state.\n Attempts to create frames that cannot be sent will raise a\n ``ProtocolError``.\n " + + def __init__(self, stream_id, config, inbound_window_size, outbound_window_size): + self.state_machine = H2StreamStateMachine(stream_id) + self.stream_id = stream_id + self.max_outbound_frame_size = None + self.request_method = None + self.outbound_flow_control_window = outbound_window_size + self._inbound_window_manager = WindowManager(inbound_window_size) + self._expected_content_length = None + self._actual_content_length = 0 + self._authority = None + self.config = config + + def __repr__(self): + return "<%s id:%d state:%r>" % ( + type(self).__name__, + self.stream_id, + self.state_machine.state) + + @property + def inbound_flow_control_window(self): + """ + The size of the inbound flow control window for the stream. This is + rarely publicly useful: instead, use :meth:`remote_flow_control_window + `. This shortcut is + largely present to provide a shortcut to this data. + """ + return self._inbound_window_manager.current_window_size + + @property + def open(self): + """ + Whether the stream is 'open' in any sense: that is, whether it counts + against the number of concurrent streams. + """ + return STREAM_OPEN[self.state_machine.state] + + @property + def closed(self): + """ + Whether the stream is closed. + """ + return self.state_machine.state == StreamState.CLOSED + + @property + def closed_by(self): + """ + Returns how the stream was closed, as one of StreamClosedBy. + """ + return self.state_machine.stream_closed_by + + def upgrade(self, client_side): + """ + Called by the connection to indicate that this stream is the initial + request/response of an upgraded connection. Places the stream into an + appropriate state. 
+ """ + self.config.logger.debug("Upgrading %r", self) + assert self.stream_id == 1 + input_ = StreamInputs.UPGRADE_CLIENT if client_side else StreamInputs.UPGRADE_SERVER + self.state_machine.process_input(input_) + + def send_headers(self, headers, encoder, end_stream=False): + """ + Returns a list of HEADERS/CONTINUATION frames to emit as either headers + or trailers. + """ + self.config.logger.debug("Send headers %s on %r", headers, self) + try: + headers = headers.items() + warnings.warn("Implicit conversion of dictionaries to two-tuples for headers is deprecated and will be removed in 3.0.", DeprecationWarning) + except AttributeError: + headers = headers + + input_ = StreamInputs.SEND_HEADERS + if not self.state_machine.client: + if is_informational_response(headers): + if end_stream: + raise ProtocolError("Cannot set END_STREAM on informational responses.") + input_ = StreamInputs.SEND_INFORMATIONAL_HEADERS + events = self.state_machine.process_input(input_) + hf = HeadersFrame(self.stream_id) + hdr_validation_flags = self._build_hdr_validation_flags(events) + frames = self._build_headers_frames(headers, encoder, hf, hdr_validation_flags) + if end_stream: + self.state_machine.process_input(StreamInputs.SEND_END_STREAM) + frames[0].flags.add("END_STREAM") + if self.state_machine.trailers_sent: + if not end_stream: + raise ProtocolError("Trailers must have END_STREAM set.") + if self.state_machine.client: + if self._authority is None: + self._authority = authority_from_headers(headers) + self.request_method = extract_method_header(headers) + return frames + + def push_stream_in_band(self, related_stream_id, headers, encoder): + """ + Returns a list of PUSH_PROMISE/CONTINUATION frames to emit as a pushed + stream header. Called on the stream that has the PUSH_PROMISE frame + sent on it. 
+ """ + self.config.logger.debug("Push stream %r", self) + events = self.state_machine.process_input(StreamInputs.SEND_PUSH_PROMISE) + ppf = PushPromiseFrame(self.stream_id) + ppf.promised_stream_id = related_stream_id + hdr_validation_flags = self._build_hdr_validation_flags(events) + frames = self._build_headers_frames(headers, encoder, ppf, hdr_validation_flags) + return frames + + def locally_pushed(self): + """ + Mark this stream as one that was pushed by this peer. Must be called + immediately after initialization. Sends no frames, simply updates the + state machine. + """ + events = self.state_machine.process_input(StreamInputs.SEND_PUSH_PROMISE) + assert not events + return [] + + def send_data(self, data, end_stream=False, pad_length=None): + """ + Prepare some data frames. Optionally end the stream. + + .. warning:: Does not perform flow control checks. + """ + self.config.logger.debug("Send data on %r with end stream set to %s", self, end_stream) + self.state_machine.process_input(StreamInputs.SEND_DATA) + df = DataFrame(self.stream_id) + df.data = data + if end_stream: + self.state_machine.process_input(StreamInputs.SEND_END_STREAM) + df.flags.add("END_STREAM") + if pad_length is not None: + df.flags.add("PADDED") + df.pad_length = pad_length + self.outbound_flow_control_window -= df.flow_controlled_length + assert self.outbound_flow_control_window >= 0 + return [ + df] + + def end_stream(self): + """ + End a stream without sending data. + """ + self.config.logger.debug("End stream %r", self) + self.state_machine.process_input(StreamInputs.SEND_END_STREAM) + df = DataFrame(self.stream_id) + df.flags.add("END_STREAM") + return [df] + + def advertise_alternative_service(self, field_value): + """ + Advertise an RFC 7838 alternative service. The semantics of this are + better documented in the ``H2Connection`` class. 
+ """ + self.config.logger.debug("Advertise alternative service of %r for %r", field_value, self) + self.state_machine.process_input(StreamInputs.SEND_ALTERNATIVE_SERVICE) + asf = AltSvcFrame(self.stream_id) + asf.field = field_value + return [asf] + + def increase_flow_control_window(self, increment): + """ + Increase the size of the flow control window for the remote side. + """ + self.config.logger.debug("Increase flow control window for %r by %d", self, increment) + self.state_machine.process_input(StreamInputs.SEND_WINDOW_UPDATE) + self._inbound_window_manager.window_opened(increment) + wuf = WindowUpdateFrame(self.stream_id) + wuf.window_increment = increment + return [wuf] + + def receive_push_promise_in_band(self, promised_stream_id, headers, header_encoding): + """ + Receives a push promise frame sent on this stream, pushing a remote + stream. This is called on the stream that has the PUSH_PROMISE sent + on it. + """ + self.config.logger.debug("Receive Push Promise on %r for remote stream %d", self, promised_stream_id) + events = self.state_machine.process_input(StreamInputs.RECV_PUSH_PROMISE) + events[0].pushed_stream_id = promised_stream_id + if self.config.validate_inbound_headers: + hdr_validation_flags = self._build_hdr_validation_flags(events) + headers = validate_headers(headers, hdr_validation_flags) + if header_encoding: + headers = list(_decode_headers(headers, header_encoding)) + events[0].headers = headers + return ([], events) + + def remotely_pushed(self, pushed_headers): + """ + Mark this stream as one that was pushed by the remote peer. Must be + called immediately after initialization. Sends no frames, simply + updates the state machine. 
+ """ + self.config.logger.debug("%r pushed by remote peer", self) + events = self.state_machine.process_input(StreamInputs.RECV_PUSH_PROMISE) + self._authority = authority_from_headers(pushed_headers) + return ([], events) + + def receive_headers(self, headers, end_stream, header_encoding): + """ + Receive a set of headers (or trailers). + """ + if is_informational_response(headers): + if end_stream: + raise ProtocolError("Cannot set END_STREAM on informational responses") + input_ = StreamInputs.RECV_INFORMATIONAL_HEADERS + else: + input_ = StreamInputs.RECV_HEADERS + events = self.state_machine.process_input(input_) + if end_stream: + es_events = self.state_machine.process_input(StreamInputs.RECV_END_STREAM) + events[0].stream_ended = es_events[0] + events += es_events + self._initialize_content_length(headers) + if isinstance(events[0], TrailersReceived): + if not end_stream: + raise ProtocolError("Trailers must have END_STREAM set") + if self.config.validate_inbound_headers: + hdr_validation_flags = self._build_hdr_validation_flags(events) + headers = validate_headers(headers, hdr_validation_flags) + if header_encoding: + headers = list(_decode_headers(headers, header_encoding)) + events[0].headers = headers + return ([], events) + + def receive_data(self, data, end_stream, flow_control_len): + """ + Receive some data. 
+ """ + self.config.logger.debug("Receive data on %r with end stream %s and flow control length set to %d", self, end_stream, flow_control_len) + events = self.state_machine.process_input(StreamInputs.RECV_DATA) + self._inbound_window_manager.window_consumed(flow_control_len) + self._track_content_length(len(data), end_stream) + if end_stream: + es_events = self.state_machine.process_input(StreamInputs.RECV_END_STREAM) + events[0].stream_ended = es_events[0] + events.extend(es_events) + events[0].data = data + events[0].flow_controlled_length = flow_control_len + return ([], events) + + def receive_window_update(self, increment): + """ + Handle a WINDOW_UPDATE increment. + """ + self.config.logger.debug("Receive Window Update on %r for increment of %d", self, increment) + events = self.state_machine.process_input(StreamInputs.RECV_WINDOW_UPDATE) + frames = [] + if events: + events[0].delta = increment + try: + self.outbound_flow_control_window = guard_increment_window(self.outbound_flow_control_window, increment) + except FlowControlError: + event = StreamReset() + event.stream_id = self.stream_id + event.error_code = ErrorCodes.FLOW_CONTROL_ERROR + event.remote_reset = False + events = [ + event] + frames = self.reset_stream(event.error_code) + + return ( + frames, events) + + def receive_continuation(self): + """ + A naked CONTINUATION frame has been received. This is always an error, + but the type of error it is depends on the state of the stream and must + transition the state of the stream, so we need to handle it. + """ + self.config.logger.debug("Receive Continuation frame on %r", self) + self.state_machine.process_input(StreamInputs.RECV_CONTINUATION) + assert False, "Should not be reachable" + + def receive_alt_svc(self, frame): + """ + An Alternative Service frame was received on the stream. This frame + inherits the origin associated with this stream. 
+ """ + self.config.logger.debug("Receive Alternative Service frame on stream %r", self) + if frame.origin: + return ([], []) + events = self.state_machine.process_input(StreamInputs.RECV_ALTERNATIVE_SERVICE) + if events: + assert isinstance(events[0], AlternativeServiceAvailable) + events[0].origin = self._authority + events[0].field_value = frame.field + return ([], events) + + def reset_stream(self, error_code=0): + """ + Close the stream locally. Reset the stream with an error code. + """ + self.config.logger.debug("Local reset %r with error code: %d", self, error_code) + self.state_machine.process_input(StreamInputs.SEND_RST_STREAM) + rsf = RstStreamFrame(self.stream_id) + rsf.error_code = error_code + return [rsf] + + def stream_reset(self, frame): + """ + Handle a stream being reset remotely. + """ + self.config.logger.debug("Remote reset %r with error code: %d", self, frame.error_code) + events = self.state_machine.process_input(StreamInputs.RECV_RST_STREAM) + if events: + events[0].error_code = _error_code_from_int(frame.error_code) + return ([], events) + + def acknowledge_received_data(self, acknowledged_size): + """ + The user has informed us that they've processed some amount of data + that was received on this stream. Pass that to the window manager and + potentially return some WindowUpdate frames. + """ + self.config.logger.debug("Acknowledge received data with size %d on %r", acknowledged_size, self) + increment = self._inbound_window_manager.process_bytes(acknowledged_size) + if increment: + f = WindowUpdateFrame(self.stream_id) + f.window_increment = increment + return [f] + return [] + + def _build_hdr_validation_flags(self, events): + """ + Constructs a set of header validation flags for use when normalizing + and validating header blocks. 
+ """ + is_trailer = isinstance(events[0], (_TrailersSent, TrailersReceived)) + is_response_header = isinstance(events[0], ( + _ResponseSent, + ResponseReceived, + InformationalResponseReceived)) + is_push_promise = isinstance(events[0], (PushedStreamReceived, _PushedRequestSent)) + return HeaderValidationFlags(is_client=(self.state_machine.client), + is_trailer=is_trailer, + is_response_header=is_response_header, + is_push_promise=is_push_promise) + + def _build_headers_frames(self, headers, encoder, first_frame, hdr_validation_flags): + """ + Helper method to build headers or push promise frames. + """ + if self.config.normalize_outbound_headers: + headers = normalize_outbound_headers(headers, hdr_validation_flags) + if self.config.validate_outbound_headers: + headers = validate_outbound_headers(headers, hdr_validation_flags) + encoded_headers = encoder.encode(headers) + header_blocks = [encoded_headers[i[:i + self.max_outbound_frame_size]] for i in range(0, len(encoded_headers), self.max_outbound_frame_size)] + frames = [] + first_frame.data = header_blocks[0] + frames.append(first_frame) + for block in header_blocks[1[:None]]: + cf = ContinuationFrame(self.stream_id) + cf.data = block + frames.append(cf) + + frames[-1].flags.add("END_HEADERS") + return frames + + def _initialize_content_length(self, headers): + """ + Checks the headers for a content-length header and initializes the + _expected_content_length field from it. It's not an error for no + Content-Length header to be present. + """ + if self.request_method == b'HEAD': + self._expected_content_length = 0 + return + for n, v in headers: + if n == b'content-length': + try: + self._expected_content_length = int(v, 10) + except ValueError: + raise ProtocolError("Invalid content-length header: %s" % v) + + return + + def _track_content_length(self, length, end_stream): + """ + Update the expected content length in response to data being received. + Validates that the appropriate amount of data is sent. 
def _decode_headers(headers, encoding):
    """
    Decode each (name, value) pair in ``headers`` using ``encoding``,
    preserving the concrete ``HeaderTuple`` subclass of every entry so that
    never-indexed headers remain never-indexed.
    """
    for header in headers:
        assert isinstance(header, HeaderTuple)
        raw_name, raw_value = header
        yield header.__class__(raw_name.decode(encoding), raw_value.decode(encoding))
def extract_method_header(headers):
    """
    Extract the value of the ``:method`` pseudo-header as bytes.

    :param headers: An iterable of (name, value) header pairs.
    :returns: The method as ``bytes``, or ``None`` if absent.
    """
    for name, value in headers:
        if name not in (b':method', ':method'):
            continue
        # Normalize unicode values to UTF-8 bytes.
        return value if isinstance(value, bytes) else value.encode("utf-8")
+ """ + for n, v in headers: + if isinstance(n, bytes): + sigil = b':' + status = b':status' + informational_start = b'1' + else: + sigil = ":" + status = ":status" + informational_start = "1" + if not n.startswith(sigil): + return False + if n != status: + continue + return v.startswith(informational_start) + + +def guard_increment_window(current, increment): + """ + Increments a flow control window, guarding against that window becoming too + large. + + :param current: The current value of the flow control window. + :param increment: The increment to apply to that window. + :returns: The new value of the window. + :raises: ``FlowControlError`` + """ + LARGEST_FLOW_CONTROL_WINDOW = 2147483647 + new_size = current + increment + if new_size > LARGEST_FLOW_CONTROL_WINDOW: + raise FlowControlError("May not increment flow control window past %d" % LARGEST_FLOW_CONTROL_WINDOW) + return new_size + + +def authority_from_headers(headers): + """ + Given a header set, searches for the authority header and returns the + value. + + Note that this doesn't terminate early, so should only be called if the + headers are for a client request. Otherwise, will loop over the entire + header set, which is potentially unwise. + + :param headers: The HTTP header set. + :returns: The value of the authority header, or ``None``. + :rtype: ``bytes`` or ``None``. + """ + for n, v in headers: + if n in (b':authority', ':authority'): + if not isinstance(v, bytes): + return v.encode("utf-8") + return v + + +HeaderValidationFlags = collections.namedtuple("HeaderValidationFlags", [ + "is_client", "is_trailer", "is_response_header", "is_push_promise"]) + +def validate_headers(headers, hdr_validation_flags): + """ + Validates a header sequence against a set of constraints from RFC 7540. + + :param headers: The HTTP header set. + :param hdr_validation_flags: An instance of HeaderValidationFlags. 
+ """ + headers = _reject_uppercase_header_fields(headers, hdr_validation_flags) + headers = _reject_surrounding_whitespace(headers, hdr_validation_flags) + headers = _reject_te(headers, hdr_validation_flags) + headers = _reject_connection_header(headers, hdr_validation_flags) + headers = _reject_pseudo_header_fields(headers, hdr_validation_flags) + headers = _check_host_authority_header(headers, hdr_validation_flags) + headers = _check_path_header(headers, hdr_validation_flags) + return list(headers) + + +def _reject_uppercase_header_fields(headers, hdr_validation_flags): + """ + Raises a ProtocolError if any uppercase character is found in a header + block. + """ + for header in headers: + if UPPER_RE.search(header[0]): + raise ProtocolError("Received uppercase header name %s." % header[0]) + yield header + + +def _reject_surrounding_whitespace(headers, hdr_validation_flags): + """ + Raises a ProtocolError if any header name or value is surrounded by + whitespace characters. + """ + for header in headers: + if not header[0][0] in _WHITESPACE: + if header[0][-1] in _WHITESPACE: + raise ProtocolError("Received header name surrounded by whitespace %r" % header[0]) + if header[1]: + if header[1][0] in _WHITESPACE or header[1][-1] in _WHITESPACE: + raise ProtocolError("Received header value surrounded by whitespace %r" % header[1]) + yield header + + +def _reject_te(headers, hdr_validation_flags): + """ + Raises a ProtocolError if the TE header is present in a header block and + its value is anything other than "trailers". + """ + for header in headers: + if header[0] in (b'te', 'te'): + if header[1].lower() not in (b'trailers', 'trailers'): + raise ProtocolError("Invalid value for Transfer-Encoding header: %s" % header[1]) + yield header + + +def _reject_connection_header(headers, hdr_validation_flags): + """ + Raises a ProtocolError if the Connection header is present in a header + block. 
+ """ + for header in headers: + if header[0] in CONNECTION_HEADERS: + raise ProtocolError("Connection-specific header field present: %s." % header[0]) + yield header + + +def _custom_startswith(test_string, bytes_prefix, unicode_prefix): + """ + Given a string that might be a bytestring or a Unicode string, + return True if it starts with the appropriate prefix. + """ + if isinstance(test_string, bytes): + return test_string.startswith(bytes_prefix) + return test_string.startswith(unicode_prefix) + + +def _assert_header_in_set(string_header, bytes_header, header_set): + """ + Given a set of header names, checks whether the string or byte version of + the header name is present. Raises a Protocol error with the appropriate + error if it's missing. + """ + if not string_header in header_set: + if not bytes_header in header_set: + raise ProtocolError("Header block missing mandatory %s header" % string_header) + + +def _reject_pseudo_header_fields(headers, hdr_validation_flags): + """ + Raises a ProtocolError if duplicate pseudo-header fields are found in a + header block or if a pseudo-header field appears in a block after an + ordinary header field. + + Raises a ProtocolError if pseudo-header fields are found in trailers. 
+ """ + seen_pseudo_header_fields = set() + seen_regular_header = False + for header in headers: + if _custom_startswith(header[0], b':', ":"): + if header[0] in seen_pseudo_header_fields: + raise ProtocolError("Received duplicate pseudo-header field %s" % header[0]) + seen_pseudo_header_fields.add(header[0]) + if seen_regular_header: + raise ProtocolError("Received pseudo-header field out of sequence: %s" % header[0]) + if header[0] not in _ALLOWED_PSEUDO_HEADER_FIELDS: + raise ProtocolError("Received custom pseudo-header field %s" % header[0]) + else: + seen_regular_header = True + yield header + + _check_pseudo_header_field_acceptability(seen_pseudo_header_fields, hdr_validation_flags) + + +def _check_pseudo_header_field_acceptability(pseudo_headers, hdr_validation_flags): + """ + Given the set of pseudo-headers present in a header block and the + validation flags, confirms that RFC 7540 allows them. + """ + if hdr_validation_flags.is_trailer: + if pseudo_headers: + raise ProtocolError("Received pseudo-header in trailer %s" % pseudo_headers) + elif hdr_validation_flags.is_response_header: + _assert_header_in_set(":status", b':status', pseudo_headers) + invalid_response_headers = pseudo_headers & _REQUEST_ONLY_HEADERS + if invalid_response_headers: + raise ProtocolError("Encountered request-only headers %s" % invalid_response_headers) + elif not hdr_validation_flags.is_response_header: + if not hdr_validation_flags.is_trailer: + _assert_header_in_set(":path", b':path', pseudo_headers) + _assert_header_in_set(":method", b':method', pseudo_headers) + _assert_header_in_set(":scheme", b':scheme', pseudo_headers) + invalid_request_headers = pseudo_headers & _RESPONSE_ONLY_HEADERS + if invalid_request_headers: + raise ProtocolError("Encountered response-only headers %s" % invalid_request_headers) + + +def _validate_host_authority_header(headers): + """ + Given the :authority and Host headers from a request block that isn't + a trailer, check that: + 1. 
At least one of these headers is set. + 2. If both headers are set, they match. + + :param headers: The HTTP header set. + :raises: ``ProtocolError`` + """ + authority_header_val = None + host_header_val = None + for header in headers: + if header[0] in (b':authority', ':authority'): + authority_header_val = header[1] + else: + if header[0] in (b'host', 'host'): + host_header_val = header[1] + yield header + + authority_present = authority_header_val is not None + host_present = host_header_val is not None + if not authority_present: + if not host_present: + raise ProtocolError("Request header block does not have an :authority or Host header.") + if authority_present: + if host_present: + if authority_header_val != host_header_val: + raise ProtocolError("Request header block has mismatched :authority and Host headers: %r / %r" % ( + authority_header_val, host_header_val)) + + +def _check_host_authority_header(headers, hdr_validation_flags): + """ + Raises a ProtocolError if a header block arrives that does not contain an + :authority or a Host header, or if a header block contains both fields, + but their values do not match. + """ + skip_validation = hdr_validation_flags.is_response_header or hdr_validation_flags.is_trailer + if skip_validation: + return headers + return _validate_host_authority_header(headers) + + +def _check_path_header(headers, hdr_validation_flags): + """ + Raise a ProtocolError if a header block arrives or is sent that contains an + empty :path header. 
+ """ + + def inner(): + for header in headers: + if header[0] in (b':path', ':path'): + if not header[1]: + raise ProtocolError("An empty :path header is forbidden") + yield header + + skip_validation = hdr_validation_flags.is_response_header or hdr_validation_flags.is_trailer + if skip_validation: + return headers + return inner() + + +def _lowercase_header_names(headers, hdr_validation_flags): + """ + Given an iterable of header two-tuples, rebuilds that iterable with the + header names lowercased. This generator produces tuples that preserve the + original type of the header tuple for tuple and any ``HeaderTuple``. + """ + for header in headers: + if isinstance(header, HeaderTuple): + yield header.__class__(header[0].lower(), header[1]) + else: + yield ( + header[0].lower(), header[1]) + + +def _strip_surrounding_whitespace(headers, hdr_validation_flags): + """ + Given an iterable of header two-tuples, strip both leading and trailing + whitespace from both header names and header values. This generator + produces tuples that preserve the original type of the header tuple for + tuple and any ``HeaderTuple``. + """ + for header in headers: + if isinstance(header, HeaderTuple): + yield header.__class__(header[0].strip(), header[1].strip()) + else: + yield ( + header[0].strip(), header[1].strip()) + + +def _strip_connection_headers(headers, hdr_validation_flags): + """ + Strip any connection headers as per RFC7540 § 8.1.2.2. + """ + for header in headers: + if header[0] not in CONNECTION_HEADERS: + yield header + + +def _check_sent_host_authority_header(headers, hdr_validation_flags): + """ + Raises an InvalidHeaderBlockError if we try to send a header block + that does not contain an :authority or a Host header, or if + the header block contains both fields, but their values do not match. 
+ """ + skip_validation = hdr_validation_flags.is_response_header or hdr_validation_flags.is_trailer + if skip_validation: + return headers + return _validate_host_authority_header(headers) + + +def normalize_outbound_headers(headers, hdr_validation_flags): + """ + Normalizes a header sequence that we are about to send. + + :param headers: The HTTP header set. + :param hdr_validation_flags: An instance of HeaderValidationFlags. + """ + headers = _lowercase_header_names(headers, hdr_validation_flags) + headers = _strip_surrounding_whitespace(headers, hdr_validation_flags) + headers = _strip_connection_headers(headers, hdr_validation_flags) + headers = _secure_headers(headers, hdr_validation_flags) + return headers + + +def validate_outbound_headers(headers, hdr_validation_flags): + """ + Validates and normalizes a header sequence that we are about to send. + + :param headers: The HTTP header set. + :param hdr_validation_flags: An instance of HeaderValidationFlags. + """ + headers = _reject_te(headers, hdr_validation_flags) + headers = _reject_connection_header(headers, hdr_validation_flags) + headers = _reject_pseudo_header_fields(headers, hdr_validation_flags) + headers = _check_sent_host_authority_header(headers, hdr_validation_flags) + headers = _check_path_header(headers, hdr_validation_flags) + return headers diff --git a/APPS_UNCOMPILED/lib/h2/windows.py b/APPS_UNCOMPILED/lib/h2/windows.py new file mode 100644 index 0000000..0e7f86a --- /dev/null +++ b/APPS_UNCOMPILED/lib/h2/windows.py @@ -0,0 +1,123 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/h2/windows.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 5603 bytes +""" +h2/windows +~~~~~~~~~~ + +Defines tools for managing HTTP/2 flow control windows. 
+ +The objects defined in this module are used to automatically manage HTTP/2 +flow control windows. Specifically, they keep track of what the size of the +window is, how much data has been consumed from that window, and how much data +the user has already used. It then implements a basic algorithm that attempts +to manage the flow control window without user input, trying to ensure that it +does not emit too many WINDOW_UPDATE frames. +""" +from __future__ import division +from .exceptions import FlowControlError +LARGEST_FLOW_CONTROL_WINDOW = 2147483647 + +class WindowManager(object): + __doc__ = "\n A basic HTTP/2 window manager.\n\n :param max_window_size: The maximum size of the flow control window.\n :type max_window_size: ``int``\n " + + def __init__(self, max_window_size): + assert max_window_size <= LARGEST_FLOW_CONTROL_WINDOW + self.max_window_size = max_window_size + self.current_window_size = max_window_size + self._bytes_processed = 0 + + def window_consumed(self, size): + """ + We have received a certain number of bytes from the remote peer. This + necessarily shrinks the flow control window! + + :param size: The number of flow controlled bytes we received from the + remote peer. + :type size: ``int`` + :returns: Nothing. + :rtype: ``None`` + """ + self.current_window_size -= size + if self.current_window_size < 0: + raise FlowControlError("Flow control window shrunk below 0") + + def window_opened(self, size): + """ + The flow control window has been incremented, either because of manual + flow control management or because of the user changing the flow + control settings. This can have the effect of increasing what we + consider to be the "maximum" flow control window size. + + This does not increase our view of how many bytes have been processed, + only of how much space is in the window. + + :param size: The increment to the flow control window we received. 
+ :type size: ``int`` + :returns: Nothing + :rtype: ``None`` + """ + self.current_window_size += size + if self.current_window_size > LARGEST_FLOW_CONTROL_WINDOW: + raise FlowControlError("Flow control window mustn't exceed %d" % LARGEST_FLOW_CONTROL_WINDOW) + if self.current_window_size > self.max_window_size: + self.max_window_size = self.current_window_size + + def process_bytes(self, size): + """ + The application has informed us that it has processed a certain number + of bytes. This may cause us to want to emit a window update frame. If + we do want to emit a window update frame, this method will return the + number of bytes that we should increment the window by. + + :param size: The number of flow controlled bytes that the application + has processed. + :type size: ``int`` + :returns: The number of bytes to increment the flow control window by, + or ``None``. + :rtype: ``int`` or ``None`` + """ + self._bytes_processed += size + return self._maybe_update_window() + + def _maybe_update_window(self): + """ + Run the algorithm. + + Our current algorithm can be described like this. + + 1. If no bytes have been processed, we immediately return 0. There is + no meaningful way for us to hand space in the window back to the + remote peer, so let's not even try. + 2. If there is no space in the flow control window, and we have + processed at least 1024 bytes (or 1/4 of the window, if the window + is smaller), we will emit a window update frame. This is to avoid + the risk of blocking a stream altogether. + 3. If there is space in the flow control window, and we have processed + at least 1/2 of the window worth of bytes, we will emit a window + update frame. This is to minimise the number of window update frames + we have to emit. + + In a healthy system with large flow control windows, this will + irregularly emit WINDOW_UPDATE frames. 
This prevents us starving the + connection by emitting eleventy bajillion WINDOW_UPDATE frames, + especially in situations where the remote peer is sending a lot of very + small DATA frames. + """ + if not self._bytes_processed: + return + max_increment = self.max_window_size - self.current_window_size + increment = 0 + if self.current_window_size == 0 and self._bytes_processed > min(1024, self.max_window_size // 4): + increment = min(self._bytes_processed, max_increment) + self._bytes_processed = 0 + else: + if self._bytes_processed >= self.max_window_size // 2: + increment = min(self._bytes_processed, max_increment) + self._bytes_processed = 0 + self.current_window_size += increment + return increment diff --git a/APPS_UNCOMPILED/lib/hpack/__init__.py b/APPS_UNCOMPILED/lib/hpack/__init__.py new file mode 100644 index 0000000..9e60b6d --- /dev/null +++ b/APPS_UNCOMPILED/lib/hpack/__init__.py @@ -0,0 +1,22 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/hpack/__init__.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 479 bytes +""" +hpack +~~~~~ + +HTTP/2 header encoding for Python. 
+""" +from .hpack import Encoder, Decoder +from .struct import HeaderTuple, NeverIndexedHeaderTuple +from .exceptions import HPACKError, HPACKDecodingError, InvalidTableIndex, OversizedHeaderListError +__all__ = [ + 'Encoder', 'Decoder', 'HPACKError', 'HPACKDecodingError', + 'InvalidTableIndex', + 'HeaderTuple', 'NeverIndexedHeaderTuple', + 'OversizedHeaderListError'] +__version__ = "3.0.0" diff --git a/APPS_UNCOMPILED/lib/hpack/compat.py b/APPS_UNCOMPILED/lib/hpack/compat.py new file mode 100644 index 0000000..114ace2 --- /dev/null +++ b/APPS_UNCOMPILED/lib/hpack/compat.py @@ -0,0 +1,52 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/hpack/compat.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 679 bytes +""" +hpack/compat +~~~~~~~~~~~~ + +Normalizes the Python 2/3 API for internal use. 
+""" +import sys +_ver = sys.version_info +is_py2 = _ver[0] == 2 +is_py3 = _ver[0] == 3 +if is_py2: + + def to_byte(char): + return ord(char) + + + def decode_hex(b): + return b.decode("hex") + + + def to_bytes(b): + if isinstance(b, memoryview): + return b.tobytes() + return bytes(b) + + + unicode = unicode + bytes = str +else: + if is_py3: + + def to_byte(char): + return char + + + def decode_hex(b): + return bytes.fromhex(b) + + + def to_bytes(b): + return bytes(b) + + + unicode = str + bytes = bytes diff --git a/APPS_UNCOMPILED/lib/hpack/exceptions.py b/APPS_UNCOMPILED/lib/hpack/exceptions.py new file mode 100644 index 0000000..b1ca610 --- /dev/null +++ b/APPS_UNCOMPILED/lib/hpack/exceptions.py @@ -0,0 +1,32 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/hpack/exceptions.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 974 bytes +""" +hyper/http20/exceptions +~~~~~~~~~~~~~~~~~~~~~~~ + +This defines exceptions used in the HTTP/2 portion of hyper. +""" + +class HPACKError(Exception): + __doc__ = "\n The base class for all ``hpack`` exceptions.\n " + + +class HPACKDecodingError(HPACKError): + __doc__ = "\n An error has been encountered while performing HPACK decoding.\n " + + +class InvalidTableIndex(HPACKDecodingError): + __doc__ = "\n An invalid table index was received.\n " + + +class OversizedHeaderListError(HPACKDecodingError): + __doc__ = "\n A header list that was larger than we allow has been received. This may be\n a DoS attack.\n\n .. versionadded:: 2.3.0\n " + + +class InvalidTableSizeError(HPACKDecodingError): + __doc__ = "\n An attempt was made to change the decoder table size to a value larger than\n allowed, or the list was shrunk and the remote peer didn't shrink their\n table size.\n\n .. 
versionadded:: 3.0.0\n " diff --git a/APPS_UNCOMPILED/lib/hpack/hpack.py b/APPS_UNCOMPILED/lib/hpack/hpack.py new file mode 100644 index 0000000..3c9d422 --- /dev/null +++ b/APPS_UNCOMPILED/lib/hpack/hpack.py @@ -0,0 +1,443 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/hpack/hpack.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 22772 bytes +""" +hpack/hpack +~~~~~~~~~~~ + +Implements the HPACK header compression algorithm as detailed by the IETF. +""" +import logging +from .table import HeaderTable, table_entry_size +from .compat import to_byte, to_bytes +from .exceptions import HPACKDecodingError, OversizedHeaderListError, InvalidTableSizeError +from .huffman import HuffmanEncoder +from .huffman_constants import REQUEST_CODES, REQUEST_CODES_LENGTH +from .huffman_table import decode_huffman +from .struct import HeaderTuple, NeverIndexedHeaderTuple +log = logging.getLogger(__name__) +INDEX_NONE = b'\x00' +INDEX_NEVER = b'\x10' +INDEX_INCREMENTAL = b'@' +_PREFIX_BIT_MAX_NUMBERS = [2 ** i - 1 for i in range(9)] +try: + basestring = basestring +except NameError: + basestring = ( + str, bytes) + +DEFAULT_MAX_HEADER_LIST_SIZE = 65536 + +def _unicode_if_needed(header, raw): + """ + Provides a header as a unicode string if raw is False, otherwise returns + it as a bytestring. + """ + name = to_bytes(header[0]) + value = to_bytes(header[1]) + if not raw: + name = name.decode("utf-8") + value = value.decode("utf-8") + return header.__class__(name, value) + + +def encode_integer(integer, prefix_bits): + """ + This encodes an integer according to the wacky integer encoding rules + defined in the HPACK spec. 
+ """ + log.debug("Encoding %d with %d bits", integer, prefix_bits) + if integer < 0: + raise ValueError("Can only encode positive integers, got %s" % integer) + if prefix_bits < 1 or prefix_bits > 8: + raise ValueError("Prefix bits must be between 1 and 8, got %s" % prefix_bits) + max_number = _PREFIX_BIT_MAX_NUMBERS[prefix_bits] + if integer < max_number: + return bytearray([integer]) + elements = [max_number] + integer -= max_number + while integer >= 128: + elements.append((integer & 127) + 128) + integer >>= 7 + + elements.append(integer) + return bytearray(elements) + + +def decode_integer(data, prefix_bits): + """ + This decodes an integer according to the wacky integer encoding rules + defined in the HPACK spec. Returns a tuple of the decoded integer and the + number of bytes that were consumed from ``data`` in order to get that + integer. + """ + if prefix_bits < 1 or prefix_bits > 8: + raise ValueError("Prefix bits must be between 1 and 8, got %s" % prefix_bits) + max_number = _PREFIX_BIT_MAX_NUMBERS[prefix_bits] + index = 1 + shift = 0 + mask = 255 >> 8 - prefix_bits + try: + number = to_byte(data[0]) & mask + if number == max_number: + while True: + next_byte = to_byte(data[index]) + index += 1 + if next_byte >= 128: + number += next_byte - 128 << shift + else: + number += next_byte << shift + break + shift += 7 + + except IndexError: + raise HPACKDecodingError("Unable to decode HPACK integer representation from %r" % data) + + log.debug("Decoded %d, consumed %d bytes", number, index) + return ( + number, index) + + +def _dict_to_iterable(header_dict): + """ + This converts a dictionary to an iterable of two-tuples. This is a + HPACK-specific function becuase it pulls "special-headers" out first and + then emits them. 
+ """ + assert isinstance(header_dict, dict) + keys = sorted((header_dict.keys()), + key=(lambda k: not _to_bytes(k).startswith(b':'))) + for key in keys: + yield (key, header_dict[key]) + + +def _to_bytes(string): + """ + Convert string to bytes. + """ + if not isinstance(string, basestring): + string = str(string) + if isinstance(string, bytes): + return string + return string.encode("utf-8") + + +class Encoder(object): + __doc__ = "\n An HPACK encoder object. This object takes HTTP headers and emits encoded\n HTTP/2 header blocks.\n " + + def __init__(self): + self.header_table = HeaderTable() + self.huffman_coder = HuffmanEncoder(REQUEST_CODES, REQUEST_CODES_LENGTH) + self.table_size_changes = [] + + @property + def header_table_size(self): + """ + Controls the size of the HPACK header table. + """ + return self.header_table.maxsize + + @header_table_size.setter + def header_table_size(self, value): + self.header_table.maxsize = value + if self.header_table.resized: + self.table_size_changes.append(value) + + def encode(self, headers, huffman=True): + """ + Takes a set of headers and encodes them into a HPACK-encoded header + block. + + :param headers: The headers to encode. Must be either an iterable of + tuples, an iterable of :class:`HeaderTuple + `, or a ``dict``. + + If an iterable of tuples, the tuples may be either + two-tuples or three-tuples. If they are two-tuples, the + tuples must be of the format ``(name, value)``. If they + are three-tuples, they must be of the format + ``(name, value, sensitive)``, where ``sensitive`` is a + boolean value indicating whether the header should be + added to header tables anywhere. If not present, + ``sensitive`` defaults to ``False``. + + If an iterable of :class:`HeaderTuple + `, the tuples must always be + two-tuples. Instead of using ``sensitive`` as a third + tuple entry, use :class:`NeverIndexedHeaderTuple + ` to request that + the field never be indexed. + + .. 
warning:: HTTP/2 requires that all special headers + (headers whose names begin with ``:`` characters) + appear at the *start* of the header block. While + this method will ensure that happens for ``dict`` + subclasses, callers using any other iterable of + tuples **must** ensure they place their special + headers at the start of the iterable. + + For efficiency reasons users should prefer to use + iterables of two-tuples: fixing the ordering of + dictionary headers is an expensive operation that + should be avoided if possible. + + :param huffman: (optional) Whether to Huffman-encode any header sent as + a literal value. Except for use when debugging, it is + recommended that this be left enabled. + + :returns: A bytestring containing the HPACK-encoded header block. + """ + log.debug("HPACK encoding %s", headers) + header_block = [] + if isinstance(headers, dict): + headers = _dict_to_iterable(headers) + if self.header_table.resized: + header_block.append(self._encode_table_size_change()) + self.header_table.resized = False + for header in headers: + sensitive = False + if isinstance(header, HeaderTuple): + sensitive = not header.indexable + else: + if len(header) > 2: + sensitive = header[2] + header = ( + _to_bytes(header[0]), _to_bytes(header[1])) + header_block.append(self.add(header, sensitive, huffman)) + + header_block = (b'').join(header_block) + log.debug("Encoded header block to %s", header_block) + return header_block + + def add(self, to_add, sensitive, huffman=False): + """ + This function takes a header key-value tuple and serializes it. 
+ """ + log.debug("Adding %s to the header table", to_add) + name, value = to_add + indexbit = INDEX_INCREMENTAL if not sensitive else INDEX_NEVER + match = self.header_table.search(name, value) + if match is None: + encoded = self._encode_literal(name, value, indexbit, huffman) + if not sensitive: + self.header_table.add(name, value) + return encoded + index, name, perfect = match + if perfect: + encoded = self._encode_indexed(index) + else: + encoded = self._encode_indexed_literal(index, value, indexbit, huffman) + if not sensitive: + self.header_table.add(name, value) + return encoded + + def _encode_indexed(self, index): + """ + Encodes a header using the indexed representation. + """ + field = encode_integer(index, 7) + field[0] |= 128 + return bytes(field) + + def _encode_literal(self, name, value, indexbit, huffman=False): + """ + Encodes a header with a literal name and literal value. If ``indexing`` + is True, the header will be added to the header table: otherwise it + will not. + """ + if huffman: + name = self.huffman_coder.encode(name) + value = self.huffman_coder.encode(value) + name_len = encode_integer(len(name), 7) + value_len = encode_integer(len(value), 7) + if huffman: + name_len[0] |= 128 + value_len[0] |= 128 + return (b'').join([ + indexbit, bytes(name_len), name, bytes(value_len), value]) + + def _encode_indexed_literal(self, index, value, indexbit, huffman=False): + """ + Encodes a header with an indexed name and a literal value and performs + incremental indexing. + """ + if indexbit != INDEX_INCREMENTAL: + prefix = encode_integer(index, 4) + else: + prefix = encode_integer(index, 6) + prefix[0] |= ord(indexbit) + if huffman: + value = self.huffman_coder.encode(value) + value_len = encode_integer(len(value), 7) + if huffman: + value_len[0] |= 128 + return (b'').join([bytes(prefix), bytes(value_len), value]) + + def _encode_table_size_change(self): + """ + Produces the encoded form of all header table size change context + updates. 
+ """ + block = b'' + for size_bytes in self.table_size_changes: + size_bytes = encode_integer(size_bytes, 5) + size_bytes[0] |= 32 + block += bytes(size_bytes) + + self.table_size_changes = [] + return block + + +class Decoder(object): + __doc__ = "\n An HPACK decoder object.\n\n .. versionchanged:: 2.3.0\n Added ``max_header_list_size`` argument.\n\n :param max_header_list_size: The maximum decompressed size we will allow\n for any single header block. This is a protection against DoS attacks\n that attempt to force the application to expand a relatively small\n amount of data into a really large header list, allowing enormous\n amounts of memory to be allocated.\n\n If this amount of data is exceeded, a `OversizedHeaderListError\n ` exception will be raised. At this\n point the connection should be shut down, as the HPACK state will no\n longer be useable.\n\n Defaults to 64kB.\n :type max_header_list_size: ``int``\n " + + def __init__(self, max_header_list_size=DEFAULT_MAX_HEADER_LIST_SIZE): + self.header_table = HeaderTable() + self.max_header_list_size = max_header_list_size + self.max_allowed_table_size = self.header_table.maxsize + + @property + def header_table_size(self): + """ + Controls the size of the HPACK header table. + """ + return self.header_table.maxsize + + @header_table_size.setter + def header_table_size(self, value): + self.header_table.maxsize = value + + def decode(self, data, raw=False): + """ + Takes an HPACK-encoded header block and decodes it into a header set. + + :param data: A bytestring representing a complete HPACK-encoded header + block. + :param raw: (optional) Whether to return the headers as tuples of raw + byte strings or to decode them as UTF-8 before returning + them. The default value is False, which returns tuples of + Unicode strings + :returns: A list of two-tuples of ``(name, value)`` representing the + HPACK-encoded headers, in the order they were decoded. 
+ :raises HPACKDecodingError: If an error is encountered while decoding + the header block. + """ + log.debug("Decoding %s", data) + data_mem = memoryview(data) + headers = [] + data_len = len(data) + inflated_size = 0 + current_index = 0 + while current_index < data_len: + current = to_byte(data[current_index]) + indexed = True if current & 128 else False + literal_index = True if current & 64 else False + encoding_update = True if current & 32 else False + if indexed: + header, consumed = self._decode_indexed(data_mem[current_index[:None]]) + else: + if literal_index: + header, consumed = self._decode_literal_index(data_mem[current_index[:None]]) + else: + if encoding_update: + if headers: + raise HPACKDecodingError("Table size update not at the start of the block") + consumed = self._update_encoding_context(data_mem[current_index[:None]]) + header = None + else: + header, consumed = self._decode_literal_no_index(data_mem[current_index[:None]]) + if header: + headers.append(header) + inflated_size += table_entry_size(*header) + if inflated_size > self.max_header_list_size: + raise OversizedHeaderListError("A header list larger than %d has been received" % self.max_header_list_size) + current_index += consumed + + self._assert_valid_table_size() + try: + return [_unicode_if_needed(h, raw) for h in headers] + except UnicodeDecodeError: + raise HPACKDecodingError("Unable to decode headers as UTF-8.") + + def _assert_valid_table_size(self): + """ + Check that the table size set by the encoder is lower than the maximum + we expect to have. + """ + if self.header_table_size > self.max_allowed_table_size: + raise InvalidTableSizeError("Encoder did not shrink table size to within the max") + + def _update_encoding_context(self, data): + """ + Handles a byte that updates the encoding context. 
+ """ + new_size, consumed = decode_integer(data, 5) + if new_size > self.max_allowed_table_size: + raise InvalidTableSizeError("Encoder exceeded max allowable table size") + self.header_table_size = new_size + return consumed + + def _decode_indexed(self, data): + """ + Decodes a header represented using the indexed representation. + """ + index, consumed = decode_integer(data, 7) + header = HeaderTuple(*self.header_table.get_by_index(index)) + log.debug("Decoded %s, consumed %d", header, consumed) + return (header, consumed) + + def _decode_literal_no_index(self, data): + return self._decode_literal(data, False) + + def _decode_literal_index(self, data): + return self._decode_literal(data, True) + + def _decode_literal(self, data, should_index): + """ + Decodes a header represented with a literal. + """ + total_consumed = 0 + if should_index: + indexed_name = to_byte(data[0]) & 63 + name_len = 6 + not_indexable = False + else: + high_byte = to_byte(data[0]) + indexed_name = high_byte & 15 + name_len = 4 + not_indexable = high_byte & 16 + if indexed_name: + index, consumed = decode_integer(data, name_len) + name = self.header_table.get_by_index(index)[0] + total_consumed = consumed + length = 0 + else: + data = data[1[:None]] + length, consumed = decode_integer(data, 7) + name = data[consumed[:consumed + length]] + if len(name) != length: + raise HPACKDecodingError("Truncated header block") + if to_byte(data[0]) & 128: + name = decode_huffman(name) + total_consumed = consumed + length + 1 + data = data[(consumed + length)[:None]] + length, consumed = decode_integer(data, 7) + value = data[consumed[:consumed + length]] + if len(value) != length: + raise HPACKDecodingError("Truncated header block") + elif to_byte(data[0]) & 128: + value = decode_huffman(value) + total_consumed += length + consumed + if not_indexable: + header = NeverIndexedHeaderTuple(name, value) + else: + header = HeaderTuple(name, value) + if should_index: + self.header_table.add(name, value) + 
log.debug("Decoded %s, total consumed %d bytes, indexed %s", header, total_consumed, should_index) + return ( + header, total_consumed) diff --git a/APPS_UNCOMPILED/lib/hpack/hpack_compat.py b/APPS_UNCOMPILED/lib/hpack/hpack_compat.py new file mode 100644 index 0000000..92dd9af --- /dev/null +++ b/APPS_UNCOMPILED/lib/hpack/hpack_compat.py @@ -0,0 +1,101 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/hpack/hpack_compat.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 3596 bytes +""" +hpack/hpack_compat +~~~~~~~~~~~~~~~~~~ + +Provides an abstraction layer over two HPACK implementations. + +This module has a pure-Python greenfield HPACK implementation that can be used +on all Python platforms. However, this implementation is both slower and more +memory-hungry than could be achieved with a C-language version. Additionally, +nghttp2's HPACK implementation currently achieves better compression ratios +than hyper's in almost all benchmarks. + +For those who care about efficiency and speed in HPACK, this module allows you +to use nghttp2's HPACK implementation instead of ours. This module detects +whether the nghttp2 bindings are installed, and if they are it wraps them in +a hpack-compatible API and uses them instead of its own. If not, it falls back +to the built-in Python bindings. +""" +import logging +from .hpack import _to_bytes +log = logging.getLogger(__name__) +try: + import nghttp2 + USE_NGHTTP2 = True + log.debug("Using nghttp2's HPACK implementation.") +except ImportError: + USE_NGHTTP2 = False + log.debug("Using our pure-Python HPACK implementation.") + +if USE_NGHTTP2: + + class Encoder(object): + __doc__ = "\n An HPACK encoder object. 
This object takes HTTP headers and emits\n encoded HTTP/2 header blocks.\n " + + def __init__(self): + self._e = nghttp2.HDDeflater() + + @property + def header_table_size(self): + """ + Returns the header table size. For the moment this isn't + useful, so we don't use it. + """ + raise NotImplementedError() + + @header_table_size.setter + def header_table_size(self, value): + log.debug("Setting header table size to %d", value) + self._e.change_table_size(value) + + def encode(self, headers, huffman=True): + """ + Encode the headers. The huffman parameter has no effect, it is + simply present for compatibility. + """ + log.debug("HPACK encoding %s", headers) + if isinstance(headers, dict): + headers = headers.items() + headers = [(_to_bytes(n), _to_bytes(v)) for n, v in headers] + header_block = self._e.deflate(headers) + return header_block + + + class Decoder(object): + __doc__ = "\n An HPACK decoder object.\n " + + def __init__(self): + self._d = nghttp2.HDInflater() + + @property + def header_table_size(self): + """ + Returns the header table size. For the moment this isn't + useful, so we don't use it. + """ + raise NotImplementedError() + + @header_table_size.setter + def header_table_size(self, value): + log.debug("Setting header table size to %d", value) + self._d.change_table_size(value) + + def decode(self, data): + """ + Takes an HPACK-encoded header block and decodes it into a header + set. 
+ """ + log.debug("Decoding %s", data) + headers = self._d.inflate(data) + return [(n.decode("utf-8"), v.decode("utf-8")) for n, v in headers] + + +else: + from .hpack import Encoder, Decoder diff --git a/APPS_UNCOMPILED/lib/hpack/huffman.py b/APPS_UNCOMPILED/lib/hpack/huffman.py new file mode 100644 index 0000000..e3abd35 --- /dev/null +++ b/APPS_UNCOMPILED/lib/hpack/huffman.py @@ -0,0 +1,51 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/hpack/huffman.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 2521 bytes +""" +hpack/huffman_decoder +~~~~~~~~~~~~~~~~~~~~~ + +An implementation of a bitwise prefix tree specially built for decoding +Huffman-coded content where we already know the Huffman table. +""" +from .compat import to_byte, decode_hex + +class HuffmanEncoder(object): + __doc__ = "\n Encodes a string according to the Huffman encoding table defined in the\n HPACK specification.\n " + + def __init__(self, huffman_code_list, huffman_code_list_lengths): + self.huffman_code_list = huffman_code_list + self.huffman_code_list_lengths = huffman_code_list_lengths + + def encode(self, bytes_to_encode): + """ + Given a string of bytes, encodes them according to the HPACK Huffman + specification. 
+ """ + if not bytes_to_encode: + return b'' + final_num = 0 + final_int_len = 0 + for char in bytes_to_encode: + byte = to_byte(char) + bin_int_len = self.huffman_code_list_lengths[byte] + bin_int = self.huffman_code_list[byte] & 2 ** (bin_int_len + 1) - 1 + final_num <<= bin_int_len + final_num |= bin_int + final_int_len += bin_int_len + + bits_to_be_padded = (8 - final_int_len % 8) % 8 + final_num <<= bits_to_be_padded + final_num |= (1 << bits_to_be_padded) - 1 + final_num = hex(final_num)[2[:None]].rstrip("L") + final_num = "0" + final_num if len(final_num) % 2 != 0 else final_num + total_bytes = (final_int_len + bits_to_be_padded) // 8 + expected_digits = total_bytes * 2 + if len(final_num) != expected_digits: + missing_digits = expected_digits - len(final_num) + final_num = "0" * missing_digits + final_num + return decode_hex(final_num) diff --git a/APPS_UNCOMPILED/lib/hpack/huffman_constants.py b/APPS_UNCOMPILED/lib/hpack/huffman_constants.py new file mode 100644 index 0000000..098ddf8 --- /dev/null +++ b/APPS_UNCOMPILED/lib/hpack/huffman_constants.py @@ -0,0 +1,305 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/hpack/huffman_constants.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 4628 bytes +""" +hpack/huffman_constants +~~~~~~~~~~~~~~~~~~~~~~~ + +Defines the constant Huffman table. This takes up an upsetting amount of space, +but c'est la vie. 
+""" +REQUEST_CODES = [ + 8184, + 8388568, + 268435426, + 268435427, + 268435428, + 268435429, + 268435430, + 268435431, + 268435432, + 16777194, + 1073741820, + 268435433, + 268435434, + 1073741821, + 268435435, + 268435436, + 268435437, + 268435438, + 268435439, + 268435440, + 268435441, + 268435442, + 1073741822, + 268435443, + 268435444, + 268435445, + 268435446, + 268435447, + 268435448, + 268435449, + 268435450, + 268435451, + 20, + 1016, + 1017, + 4090, + 8185, + 21, + 248, + 2042, + 1018, + 1019, + 249, + 2043, + 250, + 22, + 23, + 24, + 0, + 1, + 2, + 25, + 26, + 27, + 28, + 29, + 30, + 31, + 92, + 251, + 32764, + 32, + 4091, + 1020, + 8186, + 33, + 93, + 94, + 95, + 96, + 97, + 98, + 99, + 100, + 101, + 102, + 103, + 104, + 105, + 106, + 107, + 108, + 109, + 110, + 111, + 112, + 113, + 114, + 252, + 115, + 253, + 8187, + 524272, + 8188, + 16380, + 34, + 32765, + 3, + 35, + 4, + 36, + 5, + 37, + 38, + 39, + 6, + 116, + 117, + 40, + 41, + 42, + 7, + 43, + 118, + 44, + 8, + 9, + 45, + 119, + 120, + 121, + 122, + 123, + 32766, + 2044, + 16381, + 8189, + 268435452, + 1048550, + 4194258, + 1048551, + 1048552, + 4194259, + 4194260, + 4194261, + 8388569, + 4194262, + 8388570, + 8388571, + 8388572, + 8388573, + 8388574, + 16777195, + 8388575, + 16777196, + 16777197, + 4194263, + 8388576, + 16777198, + 8388577, + 8388578, + 8388579, + 8388580, + 2097116, + 4194264, + 8388581, + 4194265, + 8388582, + 8388583, + 16777199, + 4194266, + 2097117, + 1048553, + 4194267, + 4194268, + 8388584, + 8388585, + 2097118, + 8388586, + 4194269, + 4194270, + 16777200, + 2097119, + 4194271, + 8388587, + 8388588, + 2097120, + 2097121, + 4194272, + 2097122, + 8388589, + 4194273, + 8388590, + 8388591, + 1048554, + 4194274, + 4194275, + 4194276, + 8388592, + 4194277, + 4194278, + 8388593, + 67108832, + 67108833, + 1048555, + 524273, + 4194279, + 8388594, + 4194280, + 33554412, + 67108834, + 67108835, + 67108836, + 134217694, + 134217695, + 67108837, + 16777201, + 33554413, + 524274, + 
2097123, + 67108838, + 134217696, + 134217697, + 67108839, + 134217698, + 16777202, + 2097124, + 2097125, + 67108840, + 67108841, + 268435453, + 134217699, + 134217700, + 134217701, + 1048556, + 16777203, + 1048557, + 2097126, + 4194281, + 2097127, + 2097128, + 8388595, + 4194282, + 4194283, + 33554414, + 33554415, + 16777204, + 16777205, + 67108842, + 8388596, + 67108843, + 134217702, + 67108844, + 67108845, + 134217703, + 134217704, + 134217705, + 134217706, + 134217707, + 268435454, + 134217708, + 134217709, + 134217710, + 134217711, + 134217712, + 67108846, + 1073741823] +REQUEST_CODES_LENGTH = [ + 13, 23, 28, 28, 28, 28, 28, 28, 28, 24, 30, 28, 28, 30, 28, 28, + 28, + 28, 28, 28, 28, 28, 30, 28, 28, 28, 28, 28, 28, 28, 28, 28, + 6, + 10, 10, 12, 13, 6, 8, 11, 10, 10, 8, 11, 8, 6, 6, 6, + 5, 5, + 5, 6, 6, 6, 6, 6, 6, 6, 7, 8, 15, 6, 12, 10, + 13, 6, 7, 7, + 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, + 7, 7, 7, 7, 7, 7, 7, + 7, 8, 7, 8, 13, 19, 13, 14, 6, + 15, 5, 6, 5, 6, 5, 6, 6, 6, + 5, 7, 7, 6, 6, 6, 5, + 6, 7, 6, 5, 5, 6, 7, 7, 7, 7, 7, 15, + 11, 14, 13, 28, + 20, 22, 20, 20, 22, 22, 22, 23, 22, 23, 23, + 23, 23, 23, 24, 23, + 24, 24, 22, 23, 24, 23, 23, 23, 23, 21, + 22, 23, 22, 23, 23, 24, + 22, 21, 20, 22, 22, 23, 23, 21, 23, + 22, 22, 24, 21, 22, 23, 23, + 21, 21, 22, 21, 23, 22, 23, 23, + 20, 22, 22, 22, 23, 22, 22, 23, + 26, 26, 20, 19, 22, 23, 22, + 25, 26, 26, 26, 27, 27, 26, 24, 25, + 19, 21, 26, 27, 27, 26, + 27, 24, 21, 21, 26, 26, 28, 27, 27, 27, + 20, 24, 20, 21, 22, + 21, 21, 23, 22, 22, 25, 25, 24, 24, 26, 23, + 26, 27, 26, 26, + 27, 27, 27, 27, 27, 28, 27, 27, 27, 27, 27, 26, + 30] diff --git a/APPS_UNCOMPILED/lib/hpack/huffman_table.py b/APPS_UNCOMPILED/lib/hpack/huffman_table.py new file mode 100644 index 0000000..91affaf --- /dev/null +++ b/APPS_UNCOMPILED/lib/hpack/huffman_table.py @@ -0,0 +1,7 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 
14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/hpack/huffman_table.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 168580 bytes diff --git a/APPS_UNCOMPILED/lib/hpack/struct.py b/APPS_UNCOMPILED/lib/hpack/struct.py new file mode 100644 index 0000000..6b1009c --- /dev/null +++ b/APPS_UNCOMPILED/lib/hpack/struct.py @@ -0,0 +1,27 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/hpack/struct.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 1052 bytes +""" +hpack/struct +~~~~~~~~~~~~ + +Contains structures for representing header fields with associated metadata. +""" + +class HeaderTuple(tuple): + __doc__ = "\n A data structure that stores a single header field.\n\n HTTP headers can be thought of as tuples of ``(field name, field value)``.\n A single header block is a sequence of such tuples.\n\n In HTTP/2, however, certain bits of additional information are required for\n compressing these headers: in particular, whether the header field can be\n safely added to the HPACK compression context.\n\n This class stores a header that can be added to the compression context. 
In\n all other ways it behaves exactly like a tuple.\n " + __slots__ = () + indexable = True + + def __new__(_cls, *args): + return tuple.__new__(_cls, args) + + +class NeverIndexedHeaderTuple(HeaderTuple): + __doc__ = "\n A data structure that stores a single header field that cannot be added to\n a HTTP/2 header compression context.\n " + __slots__ = () + indexable = False diff --git a/APPS_UNCOMPILED/lib/hpack/table.py b/APPS_UNCOMPILED/lib/hpack/table.py new file mode 100644 index 0000000..a159e86 --- /dev/null +++ b/APPS_UNCOMPILED/lib/hpack/table.py @@ -0,0 +1,162 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/hpack/table.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 8950 bytes +from collections import deque +import logging +from .exceptions import InvalidTableIndex +log = logging.getLogger(__name__) + +def table_entry_size(name, value): + """ + Calculates the size of a single entry + + This size is mostly irrelevant to us and defined + specifically to accommodate memory management for + lower level implementations. The 32 extra bytes are + considered the "maximum" overhead that would be + required to represent each entry in the table. 
+ + See RFC7541 Section 4.1 + """ + return 32 + len(name) + len(value) + + +class HeaderTable(object): + __doc__ = "\n Implements the combined static and dynamic header table\n\n The name and value arguments for all the functions\n should ONLY be byte strings (b'') however this is not\n strictly enforced in the interface.\n\n See RFC7541 Section 2.3\n " + DEFAULT_SIZE = 4096 + STATIC_TABLE = ((b':authority', b''), (b':method', b'GET'), (b':method', b'POST'), + (b':path', b'/'), (b':path', b'/index.html'), (b':scheme', b'http'), + (b':scheme', b'https'), (b':status', b'200'), (b':status', b'204'), + (b':status', b'206'), (b':status', b'304'), (b':status', b'400'), + (b':status', b'404'), (b':status', b'500'), (b'accept-charset', b''), + (b'accept-encoding', b'gzip, deflate'), (b'accept-language', b''), + (b'accept-ranges', b''), (b'accept', b''), (b'access-control-allow-origin', b''), + (b'age', b''), (b'allow', b''), (b'authorization', b''), (b'cache-control', b''), + (b'content-disposition', b''), (b'content-encoding', b''), (b'content-language', b''), + (b'content-length', b''), (b'content-location', b''), (b'content-range', b''), + (b'content-type', b''), (b'cookie', b''), (b'date', b''), (b'etag', b''), + (b'expect', b''), (b'expires', b''), (b'from', b''), (b'host', b''), + (b'if-match', b''), (b'if-modified-since', b''), (b'if-none-match', b''), + (b'if-range', b''), (b'if-unmodified-since', b''), (b'last-modified', b''), + (b'link', b''), (b'location', b''), (b'max-forwards', b''), (b'proxy-authenticate', b''), + (b'proxy-authorization', b''), (b'range', b''), (b'referer', b''), + (b'refresh', b''), (b'retry-after', b''), (b'server', b''), (b'set-cookie', b''), + (b'strict-transport-security', b''), (b'transfer-encoding', b''), + (b'user-agent', b''), (b'vary', b''), (b'via', b''), (b'www-authenticate', b'')) + STATIC_TABLE_LENGTH = len(STATIC_TABLE) + + def __init__(self): + self._maxsize = HeaderTable.DEFAULT_SIZE + self._current_size = 0 + self.resized = 
False + self.dynamic_entries = deque() + + def get_by_index(self, index): + """ + Returns the entry specified by index + + Note that the table is 1-based ie an index of 0 is + invalid. This is due to the fact that a zero value + index signals that a completely unindexed header + follows. + + The entry will either be from the static table or + the dynamic table depending on the value of index. + """ + original_index = index + index -= 1 + if 0 <= index: + if index < HeaderTable.STATIC_TABLE_LENGTH: + return HeaderTable.STATIC_TABLE[index] + index -= HeaderTable.STATIC_TABLE_LENGTH + if index < len(self.dynamic_entries): + return self.dynamic_entries[index] + raise InvalidTableIndex("Invalid table index %d" % original_index) + + def __repr__(self): + return "HeaderTable(%d, %s, %r)" % ( + self._maxsize, + self.resized, + self.dynamic_entries) + + def add(self, name, value): + """ + Adds a new entry to the table + + We reduce the table size if the entry will make the + table size greater than maxsize. + """ + size = table_entry_size(name, value) + if size > self._maxsize: + self.dynamic_entries.clear() + self._current_size = 0 + else: + self.dynamic_entries.appendleft((name, value)) + self._current_size += size + self._shrink() + + def search(self, name, value): + """ + Searches the table for the entry specified by name + and value + + Returns one of the following: + - ``None``, no match at all + - ``(index, name, None)`` for partial matches on name only. + - ``(index, name, value)`` for perfect matches. 
+ """ + offset = HeaderTable.STATIC_TABLE_LENGTH + 1 + partial = None + for i, (n, v) in enumerate(HeaderTable.STATIC_TABLE): + if n == name: + if v == value: + return ( + i + 1, n, v) + if partial is None: + partial = ( + i + 1, n, None) + + for i, (n, v) in enumerate(self.dynamic_entries): + if n == name: + if v == value: + return ( + i + offset, n, v) + if partial is None: + partial = ( + i + offset, n, None) + + return partial + + @property + def maxsize(self): + return self._maxsize + + @maxsize.setter + def maxsize(self, newmax): + newmax = int(newmax) + log.debug("Resizing header table to %d from %d", newmax, self._maxsize) + oldmax = self._maxsize + self._maxsize = newmax + self.resized = newmax != oldmax + if newmax <= 0: + self.dynamic_entries.clear() + self._current_size = 0 + else: + if oldmax > newmax: + self._shrink() + + def _shrink(self): + """ + Shrinks the dynamic table to be at or below maxsize + """ + cursize = self._current_size + while cursize > self._maxsize: + name, value = self.dynamic_entries.pop() + cursize -= table_entry_size(name, value) + log.debug("Evicting %s: %s from the header table", name, value) + + self._current_size = cursize diff --git a/APPS_UNCOMPILED/lib/jsonpath.py b/APPS_UNCOMPILED/lib/jsonpath.py new file mode 100644 index 0000000..7c270c1 --- /dev/null +++ b/APPS_UNCOMPILED/lib/jsonpath.py @@ -0,0 +1,297 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/jsonpath.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 11901 bytes +""" +An XPath for JSON + +A port of the Perl, and JavaScript versions of JSONPath +see http://goessner.net/articles/JsonPath/ + +Based on on JavaScript version by Stefan Goessner at: + https://goessner.net/articles/JsonPath/ + http://code.google.com/p/jsonpath/ +and Perl version by Kate Rhodes 
def normalize(x):
    """normalize the path expression; outside jsonpath to allow testing"""
    subx = []

    # Replace index/filter expressions such as [?(...)] or [(...)] with
    # numbered placeholders [#n] so the separator rewriting below cannot
    # tear them apart; f2 restores them at the end.
    def f1(m):
        n = len(subx)
        g1 = m.group(1)
        subx.append(g1)
        ret = "[#%d]" % n
        return ret

    x = re.sub(r"[\['](\??\(.*?\))[\]']", f1, x)
    # Turn the path separators ('.', '[', and quotes) into ';'.  The
    # negative lookbehind keeps '@.foo' references inside saved filter
    # expressions intact.
    x = re.sub(r"'?(?<!@)\.'?|\['?", ";", x)
    # A run of separators marks the recursive-descent operator '..'.
    x = re.sub(r";;;|;;", ";..;", x)
    # Strip a trailing separator and leftover closing brackets/quotes.
    x = re.sub(r";$|'?\]|'$", "", x)

    # Put the saved expressions back in place of the [#n] placeholders.
    def f2(m):
        g1 = m.group(1)
        return subx[int(g1)]

    x = re.sub(r"#([0-9]+)", f2, x)
    return x
x, obj, path, f05) + return + m = re.match("(-?[0-9]*):(-?[0-9]*):?(-?[0-9]*)$", loc) + if m: + if isinstance(obj, (dict, list)): + + def max(x, y): + if x > y: + return x + return y + + def min(x, y): + if x < y: + return x + return y + + objlen = len(obj) + s0 = m.group(1) + s1 = m.group(2) + s2 = m.group(3) + start = int(s0) if s0 else 0 + end = int(s1) if s1 else objlen + step = int(s2) if s2 else 1 + if start < 0: + start = max(0, start + objlen) + else: + start = min(objlen, start) + if end < 0: + end = max(0, end + objlen) + else: + end = min(objlen, end) + for i in xrange(start, end, step): + trace(s(i, x), obj, path) + + else: + return + if loc.find(",") >= 0: + for piece in re.split("'?,'?", loc): + if debug > 1: + print("piece", piece) + trace(s(piece, x), obj, path) + + else: + store(path, obj) + + def walkParse error at or near `COME_FROM' instruction at offset 92_2 + + def evalx(loc, obj): + """eval expression""" + if debug: + print("evalx", loc) + else: + loc = loc.replace("@.length", "len(__obj)") + loc = loc.replace("&&", " and ").replace("||", " or ") + + def notvar(m): + return "'%s' not in __obj" % m.group(1) + + loc = re.sub("!@\\.([a-zA-Z@_0-9-]*)", notvar, loc) + + def varmatch(m): + + def brackets(elts): + ret = "__obj" + for e in elts: + if isint(e): + ret += "[%s]" % e + else: + ret += "['%s']" % e + + return ret + + g1 = m.group(1) + elts = g1.split(".") + if elts[-1] == "length": + return "len(%s)" % brackets(elts[1[:-1]]) + return brackets(elts[1[:None]]) + + loc = re.sub("(?", v) + return v + + caller_globals = sys._getframe(1).f_globals + result = [] + if expr: + if obj: + cleaned_expr = normalize(expr) + if cleaned_expr.startswith("$;"): + cleaned_expr = cleaned_expr[2[:None]] + trace(cleaned_expr, obj, "$") + if len(result) > 0: + return result + return False + + +if __name__ == "__main__": + try: + import json + except ImportError: + import simplejson as json + + import sys + if len(sys.argv) < 3 or len(sys.argv) > 4: + 
sys.stdout.write("Usage: jsonpath.py FILE PATH [OUTPUT_TYPE]\n") + sys.exit(1) + object = json.load(file(sys.argv[1])) + path = sys.argv[2] + format = "VALUE" + if len(sys.argv) > 3: + format = sys.argv[3] + value = jsonpath(object, path, format) + if not value: + sys.exit(1) + f = sys.stdout + json.dump(value, f, sort_keys=True, indent=1) + f.write("\n") + sys.exit(0) \ No newline at end of file diff --git a/APPS_UNCOMPILED/lib/jwt/__init__.py b/APPS_UNCOMPILED/lib/jwt/__init__.py new file mode 100644 index 0000000..b1367a3 --- /dev/null +++ b/APPS_UNCOMPILED/lib/jwt/__init__.py @@ -0,0 +1,21 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/jwt/__init__.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 841 bytes +""" +JSON Web Token implementation + +Minimum implementation based on this spec: +http://self-issued.info/docs/draft-jones-json-web-token-01.html +""" +__title__ = "pyjwt" +__version__ = "1.7.1" +__author__ = "José Padilla" +__license__ = "MIT" +__copyright__ = "Copyright 2015-2018 José Padilla" +from .api_jwt import encode, decode, register_algorithm, unregister_algorithm, get_unverified_header, PyJWT +from .api_jws import PyJWS +from .exceptions import InvalidTokenError, DecodeError, InvalidAlgorithmError, InvalidAudienceError, ExpiredSignatureError, ImmatureSignatureError, InvalidIssuedAtError, InvalidIssuerError, ExpiredSignature, InvalidAudience, InvalidIssuer, MissingRequiredClaimError, InvalidSignatureError, PyJWTError diff --git a/APPS_UNCOMPILED/lib/jwt/__main__.py b/APPS_UNCOMPILED/lib/jwt/__main__.py new file mode 100644 index 0000000..cf64c9c --- /dev/null +++ b/APPS_UNCOMPILED/lib/jwt/__main__.py @@ -0,0 +1,111 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 
def encode_payload(args):
    """Build a claims dict from KEY=VALUE command-line arguments and return
    the encoded token as text.

    Raises ValueError when no signing key was supplied.
    """
    if args.key is None:
        raise ValueError("Key is required when encoding. See --help for usage.")
    payload = {}
    for arg in args.payload:
        k, v = arg.split("=", 1)
        # "exp" accepts a "+N" offset relative to the current Unix time.
        # BUG FIX: the decompiled source read v[1[:None]], which subscripts
        # the int literal 1 and raises TypeError; the intended slice is v[1:].
        if k == "exp" and v[0] == "+" and len(v) > 1:
            v = str(int(time.time() + int(v[1:])))
        # Coerce numeric-looking values to int, then float.
        if v.isdigit():
            v = int(v)
        else:
            try:
                v = float(v)
            except ValueError:
                pass
        # Map the JSON literals true/false/null onto Python values.
        constants = {'true': True,
         'false': False, 'null': None}
        if v in constants:
            v = constants[v]
        payload[k] = v

    token = encode(payload,
      key=(args.key),
      algorithm=(args.algorithm))
    return token.decode("utf-8")
def main():
    """Command-line entry point: parse argv and run the chosen subcommand."""
    arg_parser = build_argparser()
    try:
        # BUG FIX: the decompiled source read sys.argv[1[:None]], which
        # subscripts the int literal 1 and raises TypeError at startup;
        # the intended slice is sys.argv[1:].
        arguments = arg_parser.parse_args(sys.argv[1:])
        output = arguments.func(arguments)
        print(output)
    except Exception as e:
        # Broad catch is deliberate for a CLI: show the error and usage
        # instead of a traceback.
        print("There was an unforseen error: ", e)
        arg_parser.print_help()
def get_default_algorithms():
    """
    Returns the algorithms that are implemented by the library.
    """
    # Pure-python (HMAC) algorithms are always available.
    algorithms = {
        "none": NoneAlgorithm(),
        "HS256": HMACAlgorithm(HMACAlgorithm.SHA256),
        "HS384": HMACAlgorithm(HMACAlgorithm.SHA384),
        "HS512": HMACAlgorithm(HMACAlgorithm.SHA512),
    }
    # Asymmetric algorithms need the optional `cryptography` package.
    if has_crypto:
        algorithms.update({
            "RS256": RSAAlgorithm(RSAAlgorithm.SHA256),
            "RS384": RSAAlgorithm(RSAAlgorithm.SHA384),
            "RS512": RSAAlgorithm(RSAAlgorithm.SHA512),
            "ES256": ECAlgorithm(ECAlgorithm.SHA256),
            "ES384": ECAlgorithm(ECAlgorithm.SHA384),
            "ES521": ECAlgorithm(ECAlgorithm.SHA512),
            "ES512": ECAlgorithm(ECAlgorithm.SHA512),
            "PS256": RSAPSSAlgorithm(RSAPSSAlgorithm.SHA256),
            "PS384": RSAPSSAlgorithm(RSAPSSAlgorithm.SHA384),
            "PS512": RSAPSSAlgorithm(RSAPSSAlgorithm.SHA512),
        })
    return algorithms
+ """ + raise NotImplementedError + + @staticmethod + def to_jwk(key_obj): + """ + Serializes a given RSA key into a JWK + """ + raise NotImplementedError + + @staticmethod + def from_jwk(jwk): + """ + Deserializes a given RSA key from JWK back into a PublicKey or PrivateKey object + """ + raise NotImplementedError + + +class NoneAlgorithm(Algorithm): + __doc__ = "\n Placeholder for use when no signing or verification\n operations are required.\n " + + def prepare_key(self, key): + if key == "": + key = None + if key is not None: + raise InvalidKeyError('When alg = "none", key value must be None.') + return key + + def sign(self, msg, key): + return b'' + + def verify(self, msg, key, sig): + return False + + +class HMACAlgorithm(Algorithm): + __doc__ = "\n Performs signing and verification operations using HMAC\n and the specified hash function.\n " + SHA256 = hashlib.sha256 + SHA384 = hashlib.sha384 + SHA512 = hashlib.sha512 + + def __init__(self, hash_alg): + self.hash_alg = hash_alg + + def prepare_key(self, key): + key = force_bytes(key) + invalid_strings = [ + b'-----BEGIN PUBLIC KEY-----', + b'-----BEGIN CERTIFICATE-----', + b'-----BEGIN RSA PUBLIC KEY-----', + b'ssh-rsa'] + if any([string_value in key for string_value in invalid_strings]): + raise InvalidKeyError("The specified key is an asymmetric key or x509 certificate and should not be used as an HMAC secret.") + return key + + @staticmethod + def to_jwk(key_obj): + return json.dumps({'k':force_unicode(base64url_encode(force_bytes(key_obj))), + 'kty':"oct"}) + + @staticmethod + def from_jwk(jwk): + obj = json.loads(jwk) + if obj.get("kty") != "oct": + raise InvalidKeyError("Not an HMAC key") + return base64url_decode(obj["k"]) + + def sign(self, msg, key): + return hmac.new(key, msg, self.hash_alg).digest() + + def verify(self, msg, key, sig): + return constant_time_compare(sig, self.sign(msg, key)) + + +if has_crypto: + + class RSAAlgorithm(Algorithm): + __doc__ = "\n Performs signing and verification 
operations using\n RSASSA-PKCS-v1_5 and the specified hash function.\n " + SHA256 = hashes.SHA256 + SHA384 = hashes.SHA384 + SHA512 = hashes.SHA512 + + def __init__(self, hash_alg): + self.hash_alg = hash_alg + + def prepare_key(self, key): + if isinstance(key, RSAPrivateKey) or isinstance(key, RSAPublicKey): + return key + elif isinstance(key, string_types): + key = force_bytes(key) + try: + if key.startswith(b'ssh-rsa'): + key = load_ssh_public_key(key, backend=(default_backend())) + else: + key = load_pem_private_key(key, password=None, backend=(default_backend())) + except ValueError: + key = load_pem_public_key(key, backend=(default_backend())) + + else: + raise TypeError("Expecting a PEM-formatted key.") + return key + + @staticmethod + def to_jwk(key_obj): + obj = None + if getattr(key_obj, "private_numbers", None): + numbers = key_obj.private_numbers() + obj = {'kty':"RSA", + 'key_ops':[ + "sign"], + 'n':force_unicode(to_base64url_uint(numbers.public_numbers.n)), + 'e':force_unicode(to_base64url_uint(numbers.public_numbers.e)), + 'd':force_unicode(to_base64url_uint(numbers.d)), + 'p':force_unicode(to_base64url_uint(numbers.p)), + 'q':force_unicode(to_base64url_uint(numbers.q)), + 'dp':force_unicode(to_base64url_uint(numbers.dmp1)), + 'dq':force_unicode(to_base64url_uint(numbers.dmq1)), + 'qi':force_unicode(to_base64url_uint(numbers.iqmp))} + else: + if getattr(key_obj, "verify", None): + numbers = key_obj.public_numbers() + obj = {'kty':"RSA", + 'key_ops':[ + "verify"], + 'n':force_unicode(to_base64url_uint(numbers.n)), + 'e':force_unicode(to_base64url_uint(numbers.e))} + else: + raise InvalidKeyError("Not a public or private key") + return json.dumps(obj) + + @staticmethod + def from_jwk(jwk): + try: + obj = json.loads(jwk) + except ValueError: + raise InvalidKeyError("Key is not valid JSON") + + if obj.get("kty") != "RSA": + raise InvalidKeyError("Not an RSA key") + elif "d" in obj: + if "e" in obj and "n" in obj: + if "oth" in obj: + raise 
InvalidKeyError("Unsupported RSA private key: > 2 primes not supported") + else: + other_props = [ + 'p', 'q', + 'dp', 'dq', 'qi'] + props_found = [prop in obj for prop in other_props] + any_props_found = any(props_found) + if any_props_found: + if not all(props_found): + raise InvalidKeyError("RSA key must include all parameters if any are present besides d") + public_numbers = RSAPublicNumbers(from_base64url_uint(obj["e"]), from_base64url_uint(obj["n"])) + if any_props_found: + numbers = RSAPrivateNumbers(d=(from_base64url_uint(obj["d"])), + p=(from_base64url_uint(obj["p"])), + q=(from_base64url_uint(obj["q"])), + dmp1=(from_base64url_uint(obj["dp"])), + dmq1=(from_base64url_uint(obj["dq"])), + iqmp=(from_base64url_uint(obj["qi"])), + public_numbers=public_numbers) + else: + d = from_base64url_uint(obj["d"]) + p, q = rsa_recover_prime_factors(public_numbers.n, d, public_numbers.e) + numbers = RSAPrivateNumbers(d=d, + p=p, + q=q, + dmp1=(rsa_crt_dmp1(d, p)), + dmq1=(rsa_crt_dmq1(d, q)), + iqmp=(rsa_crt_iqmp(p, q)), + public_numbers=public_numbers) + return numbers.private_key(default_backend()) + if "n" in obj: + if "e" in obj: + numbers = RSAPublicNumbers(from_base64url_uint(obj["e"]), from_base64url_uint(obj["n"])) + return numbers.public_key(default_backend()) + raise InvalidKeyError("Not a public or private key") + + def sign(self, msg, key): + return key.sign(msg, padding.PKCS1v15(), self.hash_alg()) + + def verify(self, msg, key, sig): + try: + key.verify(sig, msg, padding.PKCS1v15(), self.hash_alg()) + return True + except InvalidSignature: + return False + + + class ECAlgorithm(Algorithm): + __doc__ = "\n Performs signing and verification operations using\n ECDSA and the specified hash function\n " + SHA256 = hashes.SHA256 + SHA384 = hashes.SHA384 + SHA512 = hashes.SHA512 + + def __init__(self, hash_alg): + self.hash_alg = hash_alg + + def prepare_key(self, key): + if isinstance(key, EllipticCurvePrivateKey) or isinstance(key, EllipticCurvePublicKey): + 
return key + elif isinstance(key, string_types): + key = force_bytes(key) + try: + if key.startswith(b'ecdsa-sha2-'): + key = load_ssh_public_key(key, backend=(default_backend())) + else: + key = load_pem_public_key(key, backend=(default_backend())) + except ValueError: + key = load_pem_private_key(key, password=None, backend=(default_backend())) + + else: + raise TypeError("Expecting a PEM-formatted key.") + return key + + def sign(self, msg, key): + der_sig = key.sign(msg, ec.ECDSA(self.hash_alg())) + return der_to_raw_signature(der_sig, key.curve) + + def verify(self, msg, key, sig): + try: + der_sig = raw_to_der_signature(sig, key.curve) + except ValueError: + return False + else: + try: + key.verify(der_sig, msg, ec.ECDSA(self.hash_alg())) + return True + except InvalidSignature: + return False + + + class RSAPSSAlgorithm(RSAAlgorithm): + __doc__ = "\n Performs a signature using RSASSA-PSS with MGF1\n " + + def sign(self, msg, key): + return key.sign(msg, padding.PSS(mgf=(padding.MGF1(self.hash_alg())), + salt_length=(self.hash_alg.digest_size)), self.hash_alg()) + + def verify(self, msg, key, sig): + try: + key.verify(sig, msg, padding.PSS(mgf=(padding.MGF1(self.hash_alg())), + salt_length=(self.hash_alg.digest_size)), self.hash_alg()) + return True + except InvalidSignature: + return False diff --git a/APPS_UNCOMPILED/lib/jwt/api_jws.py b/APPS_UNCOMPILED/lib/jwt/api_jws.py new file mode 100644 index 0000000..66f33f2 --- /dev/null +++ b/APPS_UNCOMPILED/lib/jwt/api_jws.py @@ -0,0 +1,186 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/jwt/api_jws.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 8337 bytes +import binascii, json, warnings +try: + from typing import Callable, Dict, List, Optional, Union +except ImportError: + pass + +from .algorithms 
import Algorithm, get_default_algorithms, has_crypto, requires_cryptography +from .compat import Mapping, binary_type, string_types, text_type +from .exceptions import DecodeError, InvalidAlgorithmError, InvalidSignatureError, InvalidTokenError +from .utils import base64url_decode, base64url_encode, force_bytes, merge_dict + +class PyJWS(object): + header_typ = "JWT" + + def __init__(self, algorithms=None, options=None): + self._algorithms = get_default_algorithms() + self._valid_algs = set(algorithms) if algorithms is not None else set(self._algorithms) + for key in list(self._algorithms.keys()): + if key not in self._valid_algs: + del self._algorithms[key] + + if not options: + options = {} + self.options = merge_dict(self._get_default_options(), options) + + @staticmethod + def _get_default_options(): + return {"verify_signature": True} + + def register_algorithm(self, alg_id, alg_obj): + """ + Registers a new Algorithm for use when creating and verifying tokens. + """ + if alg_id in self._algorithms: + raise ValueError("Algorithm already has a handler.") + if not isinstance(alg_obj, Algorithm): + raise TypeError("Object is not of type `Algorithm`") + self._algorithms[alg_id] = alg_obj + self._valid_algs.add(alg_id) + + def unregister_algorithm(self, alg_id): + """ + Unregisters an Algorithm for use when creating and verifying tokens + Throws KeyError if algorithm is not registered. + """ + if alg_id not in self._algorithms: + raise KeyError("The specified algorithm could not be removed because it is not registered.") + del self._algorithms[alg_id] + self._valid_algs.remove(alg_id) + + def get_algorithms(self): + """ + Returns a list of supported values for the 'alg' parameter. 
+ """ + return list(self._valid_algs) + + def encode(self, payload, key, algorithm='HS256', headers=None, json_encoder=None): + segments = [] + if algorithm is None: + algorithm = "none" + if algorithm not in self._valid_algs: + pass + header = {'typ':self.header_typ, 'alg':algorithm} + if headers: + self._validate_headers(headers) + header.update(headers) + json_header = force_bytes(json.dumps(header, + separators=(',', ':'), + cls=json_encoder)) + segments.append(base64url_encode(json_header)) + segments.append(base64url_encode(payload)) + signing_input = (b'.').join(segments) + try: + alg_obj = self._algorithms[algorithm] + key = alg_obj.prepare_key(key) + signature = alg_obj.sign(signing_input, key) + except KeyError: + if (has_crypto or algorithm) in requires_cryptography: + raise NotImplementedError("Algorithm '%s' could not be found. Do you have cryptography installed?" % algorithm) + else: + raise NotImplementedError("Algorithm not supported") + + segments.append(base64url_encode(signature)) + return (b'.').join(segments) + + def decode(self, jwt, key='', verify=True, algorithms=None, options=None, **kwargs): + merged_options = merge_dict(self.options, options) + verify_signature = merged_options["verify_signature"] + if verify_signature: + if not algorithms: + warnings.warn('It is strongly recommended that you pass in a value for the "algorithms" argument when calling decode(). This argument will be mandatory in a future version.', DeprecationWarning) + else: + payload, signing_input, header, signature = self._load(jwt) + if not verify: + warnings.warn("The verify parameter is deprecated. 
Please use verify_signature in options instead.", DeprecationWarning, + stacklevel=2) + else: + if verify_signature: + self._verify_signature(payload, signing_input, header, signature, key, algorithms) + return payload + + def get_unverified_header(self, jwt): + """Returns back the JWT header parameters as a dict() + + Note: The signature is not verified so the header parameters + should not be fully trusted until signature verification is complete + """ + headers = self._load(jwt)[2] + self._validate_headers(headers) + return headers + + def _load(self, jwt): + if isinstance(jwt, text_type): + jwt = jwt.encode("utf-8") + elif not issubclass(type(jwt), binary_type): + raise DecodeError("Invalid token type. Token must be a {0}".format(binary_type)) + else: + try: + signing_input, crypto_segment = jwt.rsplit(b'.', 1) + header_segment, payload_segment = signing_input.split(b'.', 1) + except ValueError: + raise DecodeError("Not enough segments") + + try: + header_data = base64url_decode(header_segment) + except (TypeError, binascii.Error): + raise DecodeError("Invalid header padding") + + try: + header = json.loads(header_data.decode("utf-8")) + except ValueError as e: + try: + raise DecodeError("Invalid header string: %s" % e) + finally: + e = None + del e + + if not isinstance(header, Mapping): + raise DecodeError("Invalid header string: must be a json object") + try: + payload = base64url_decode(payload_segment) + except (TypeError, binascii.Error): + raise DecodeError("Invalid payload padding") + + try: + signature = base64url_decode(crypto_segment) + except (TypeError, binascii.Error): + raise DecodeError("Invalid crypto padding") + + return (payload, signing_input, header, signature) + + def _verify_signature(self, payload, signing_input, header, signature, key='', algorithms=None): + alg = header.get("alg") + if algorithms is not None: + if alg not in algorithms: + raise InvalidAlgorithmError("The specified alg value is not allowed") + try: + alg_obj = 
self._algorithms[alg] + key = alg_obj.prepare_key(key) + if not alg_obj.verify(signing_input, key, signature): + raise InvalidSignatureError("Signature verification failed") + except KeyError: + raise InvalidAlgorithmError("Algorithm not supported") + + def _validate_headers(self, headers): + if "kid" in headers: + self._validate_kid(headers["kid"]) + + def _validate_kid(self, kid): + if not isinstance(kid, string_types): + raise InvalidTokenError("Key ID header parameter must be a string") + + +_jws_global_obj = PyJWS() +encode = _jws_global_obj.encode +decode = _jws_global_obj.decode +register_algorithm = _jws_global_obj.register_algorithm +unregister_algorithm = _jws_global_obj.unregister_algorithm +get_unverified_header = _jws_global_obj.get_unverified_header diff --git a/APPS_UNCOMPILED/lib/jwt/api_jwt.py b/APPS_UNCOMPILED/lib/jwt/api_jwt.py new file mode 100644 index 0000000..ee91368 --- /dev/null +++ b/APPS_UNCOMPILED/lib/jwt/api_jwt.py @@ -0,0 +1,170 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/jwt/api_jwt.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 8127 bytes +import json, warnings +from calendar import timegm +from datetime import datetime, timedelta +try: + from typing import Callable, Dict, List, Optional, Union +except ImportError: + pass + +from .api_jws import PyJWS +from .algorithms import Algorithm, get_default_algorithms +from .compat import Iterable, Mapping, string_types +from .exceptions import DecodeError, ExpiredSignatureError, ImmatureSignatureError, InvalidAudienceError, InvalidIssuedAtError, InvalidIssuerError, MissingRequiredClaimError +from .utils import merge_dict + +class PyJWT(PyJWS): + header_type = "JWT" + + @staticmethod + def _get_default_options(): + return { + 'verify_signature': True, + 'verify_exp': True, + 
'verify_nbf': True, + 'verify_iat': True, + 'verify_aud': True, + 'verify_iss': True, + 'require_exp': False, + 'require_iat': False, + 'require_nbf': False} + + def encode(self, payload, key, algorithm='HS256', headers=None, json_encoder=None): + if not isinstance(payload, Mapping): + raise TypeError("Expecting a mapping object, as JWT only supports JSON objects as payloads.") + for time_claim in ('exp', 'iat', 'nbf'): + if isinstance(payload.get(time_claim), datetime): + payload[time_claim] = timegm(payload[time_claim].utctimetuple()) + + json_payload = json.dumps(payload, + separators=(',', ':'), + cls=json_encoder).encode("utf-8") + return super(PyJWT, self).encode(json_payload, key, algorithm, headers, json_encoder) + + def decode(self, jwt, key='', verify=True, algorithms=None, options=None, **kwargs): + if verify: + if not algorithms: + warnings.warn('It is strongly recommended that you pass in a value for the "algorithms" argument when calling decode(). This argument will be mandatory in a future version.', DeprecationWarning) + else: + payload, _, _, _ = self._load(jwt) + if options is None: + options = {"verify_signature": verify} + else: + options.setdefault("verify_signature", verify) + decoded = (super(PyJWT, self).decode)(jwt, key=key, algorithms=algorithms, options=options, **kwargs) + try: + payload = json.loads(decoded.decode("utf-8")) + except ValueError as e: + try: + raise DecodeError("Invalid payload string: %s" % e) + finally: + e = None + del e + + if not isinstance(payload, Mapping): + raise DecodeError("Invalid payload string: must be a json object") + if verify: + merged_options = merge_dict(self.options, options) + (self._validate_claims)(payload, merged_options, **kwargs) + return payload + + def _validate_claims(self, payload, options, audience=None, issuer=None, leeway=0, **kwargs): + if "verify_expiration" in kwargs: + options["verify_exp"] = kwargs.get("verify_expiration", True) + warnings.warn("The verify_expiration parameter is 
deprecated. Please use verify_exp in options instead.", DeprecationWarning) + else: + if isinstance(leeway, timedelta): + leeway = leeway.total_seconds() + elif not isinstance(audience, (string_types, type(None), Iterable)): + raise TypeError("audience must be a string, iterable, or None") + self._validate_required_claims(payload, options) + now = timegm(datetime.utcnow().utctimetuple()) + if "iat" in payload: + if options.get("verify_iat"): + self._validate_iat(payload, now, leeway) + if "nbf" in payload and options.get("verify_nbf"): + self._validate_nbf(payload, now, leeway) + if "exp" in payload and options.get("verify_exp"): + self._validate_exp(payload, now, leeway) + if options.get("verify_iss"): + self._validate_iss(payload, issuer) + if options.get("verify_aud"): + self._validate_aud(payload, audience) + + def _validate_required_claims(self, payload, options): + if options.get("require_exp"): + if payload.get("exp") is None: + raise MissingRequiredClaimError("exp") + elif options.get("require_iat"): + if payload.get("iat") is None: + raise MissingRequiredClaimError("iat") + if options.get("require_nbf") and payload.get("nbf") is None: + raise MissingRequiredClaimError("nbf") + + def _validate_iat(self, payload, now, leeway): + try: + int(payload["iat"]) + except ValueError: + raise InvalidIssuedAtError("Issued At claim (iat) must be an integer.") + + def _validate_nbf(self, payload, now, leeway): + try: + nbf = int(payload["nbf"]) + except ValueError: + raise DecodeError("Not Before claim (nbf) must be an integer.") + + if nbf > now + leeway: + raise ImmatureSignatureError("The token is not yet valid (nbf)") + + def _validate_exp(self, payload, now, leeway): + try: + exp = int(payload["exp"]) + except ValueError: + raise DecodeError("Expiration Time claim (exp) must be an integer.") + + if exp < now - leeway: + raise ExpiredSignatureError("Signature has expired") + + def _validate_aud(self, payload, audience): + if audience is None: + if "aud" not in 
payload: + return + elif audience is not None and "aud" not in payload: + raise MissingRequiredClaimError("aud") + if audience is None: + if "aud" in payload: + raise InvalidAudienceError("Invalid audience") + audience_claims = payload["aud"] + if isinstance(audience_claims, string_types): + audience_claims = [ + audience_claims] + elif not isinstance(audience_claims, list): + raise InvalidAudienceError("Invalid claim format in token") + if any((not isinstance(c, string_types) for c in audience_claims)): + raise InvalidAudienceError("Invalid claim format in token") + if isinstance(audience, string_types): + audience = [ + audience] + assert any((aud in audience_claims for aud in audience)), "Invalid audience" + + def _validate_iss(self, payload, issuer): + if issuer is None: + return + if "iss" not in payload: + raise MissingRequiredClaimError("iss") + if payload["iss"] != issuer: + raise InvalidIssuerError("Invalid issuer") + + +_jwt_global_obj = PyJWT() +encode = _jwt_global_obj.encode +decode = _jwt_global_obj.decode +register_algorithm = _jwt_global_obj.register_algorithm +unregister_algorithm = _jwt_global_obj.unregister_algorithm +get_unverified_header = _jwt_global_obj.get_unverified_header diff --git a/APPS_UNCOMPILED/lib/jwt/compat.py b/APPS_UNCOMPILED/lib/jwt/compat.py new file mode 100644 index 0000000..a250a0d --- /dev/null +++ b/APPS_UNCOMPILED/lib/jwt/compat.py @@ -0,0 +1,66 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/jwt/compat.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 1692 bytes +""" +The `compat` module provides support for backwards compatibility with older +versions of python, and compatibility wrappers around optional packages. 
+""" +import hmac, struct, sys +PY3 = sys.version_info[0] == 3 +if PY3: + text_type = str + binary_type = bytes +else: + text_type = unicode + binary_type = str +string_types = (text_type, binary_type) +try: + from collections.abc import Iterable, Mapping +except ImportError: + from collections import Iterable, Mapping + +try: + constant_time_compare = hmac.compare_digest +except AttributeError: + + def constant_time_compare(val1, val2): + """ + Returns True if the two strings are equal, False otherwise. + + The time taken is independent of the number of characters that match. + """ + if len(val1) != len(val2): + return False + result = 0 + for x, y in zip(val1, val2): + result |= ord(x) ^ ord(y) + + return result == 0 + + +if getattr(int, "to_bytes", None): + + def bytes_from_int(val): + remaining = val + byte_length = 0 + while remaining != 0: + remaining = remaining >> 8 + byte_length += 1 + + return val.to_bytes(byte_length, "big", signed=False) + + +else: + + def bytes_from_int(val): + buf = [] + while val: + val, remainder = divmod(val, 256) + buf.append(remainder) + + buf.reverse() + return (struct.pack)("%sB" % len(buf), *buf) diff --git a/APPS_UNCOMPILED/lib/jwt/contrib/__init__.py b/APPS_UNCOMPILED/lib/jwt/contrib/__init__.py new file mode 100644 index 0000000..7454f7e --- /dev/null +++ b/APPS_UNCOMPILED/lib/jwt/contrib/__init__.py @@ -0,0 +1,7 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/jwt/contrib/__init__.py +# Compiled at: 2024-04-18 03:12:55 +pass diff --git a/APPS_UNCOMPILED/lib/jwt/contrib/algorithms/__init__.py b/APPS_UNCOMPILED/lib/jwt/contrib/algorithms/__init__.py new file mode 100644 index 0000000..4a1eb5d --- /dev/null +++ b/APPS_UNCOMPILED/lib/jwt/contrib/algorithms/__init__.py @@ -0,0 +1,7 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version 
# NOTE(review): jwt/contrib/__init__.py and jwt/contrib/algorithms/__init__.py
# in this span are empty placeholder modules (decompiler header + ``pass``).

# --- jwt/contrib/algorithms/py_ecdsa.py ---
import hashlib

import ecdsa

from jwt.algorithms import Algorithm
from jwt.compat import string_types, text_type


class ECAlgorithm(Algorithm):
    """
    Performs signing and verification operations using
    ECDSA and the specified hash function

    This class requires the ecdsa package to be installed.

    This is based off of the implementation in PyJWT 0.3.2
    """
    SHA256 = hashlib.sha256
    SHA384 = hashlib.sha384
    SHA512 = hashlib.sha512

    def __init__(self, hash_alg):
        # Hash constructor (e.g. hashlib.sha256) used by sign()/verify().
        self.hash_alg = hash_alg

    def prepare_key(self, key):
        """Accept an ecdsa key object or a PEM string; return a key object.

        :raises TypeError: if *key* is neither a key object nor a string.
        """
        if isinstance(key, (ecdsa.SigningKey, ecdsa.VerifyingKey)):
            return key
        if isinstance(key, string_types):
            if isinstance(key, text_type):
                key = key.encode("utf-8")
            # A PEM may contain a public (verifying) or private (signing)
            # key: try the public parse first, fall back on the DER error.
            # BUG FIX: the decompiled source attached ``else: raise
            # TypeError`` to this try/except as a try-else, which would
            # raise whenever VerifyingKey.from_pem() succeeded.  The
            # TypeError belongs to the non-string branch below.
            try:
                key = ecdsa.VerifyingKey.from_pem(key)
            except ecdsa.der.UnexpectedDER:
                key = ecdsa.SigningKey.from_pem(key)
        else:
            raise TypeError("Expecting a PEM-formatted key.")
        return key

    def sign(self, msg, key):
        """Sign *msg* with *key*; returns the raw string-encoded signature."""
        return key.sign(msg, hashfunc=self.hash_alg, sigencode=ecdsa.util.sigencode_string)

    def verify(self, msg, key, sig):
        """Return True when *sig* is a valid signature of *msg* by *key*."""
        try:
            return key.verify(sig, msg, hashfunc=self.hash_alg, sigdecode=ecdsa.util.sigdecode_string)
        except AssertionError:
            return False


# --- jwt/contrib/algorithms/pycrypto.py ---
import Crypto.Hash.SHA256
import Crypto.Hash.SHA384
import Crypto.Hash.SHA512
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5


class RSAAlgorithm(Algorithm):
    """
    Performs signing and verification operations using
    RSASSA-PKCS-v1_5 and the specified hash function.

    This class requires PyCrypto package to be installed.

    This is based off of the implementation in PyJWT 0.3.2
    """
    SHA256 = Crypto.Hash.SHA256
    SHA384 = Crypto.Hash.SHA384
    SHA512 = Crypto.Hash.SHA512

    def __init__(self, hash_alg):
        # Hash module (e.g. Crypto.Hash.SHA256) used by sign()/verify().
        self.hash_alg = hash_alg

    def prepare_key(self, key):
        """Accept an RSA key object or a PEM/DER string; return a key object.

        :raises TypeError: if *key* is neither a key object nor a string.
        """
        if isinstance(key, RSA._RSAobj):
            return key
        if isinstance(key, string_types):
            if isinstance(key, text_type):
                key = key.encode("utf-8")
            key = RSA.importKey(key)
        else:
            raise TypeError("Expecting a PEM- or RSA-formatted key.")
        return key

    def sign(self, msg, key):
        """Sign the digest of *msg* with *key* (PKCS#1 v1.5)."""
        return PKCS1_v1_5.new(key).sign(self.hash_alg.new(msg))

    def verify(self, msg, key, sig):
        """Return True when *sig* matches the digest of *msg* under *key*."""
        return PKCS1_v1_5.new(key).verify(self.hash_alg.new(msg), sig)
+++ b/APPS_UNCOMPILED/lib/jwt/exceptions.py @@ -0,0 +1,65 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/jwt/exceptions.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 1045 bytes + + +class PyJWTError(Exception): + __doc__ = "\n Base class for all exceptions\n " + + +class InvalidTokenError(PyJWTError): + pass + + +class DecodeError(InvalidTokenError): + pass + + +class InvalidSignatureError(DecodeError): + pass + + +class ExpiredSignatureError(InvalidTokenError): + pass + + +class InvalidAudienceError(InvalidTokenError): + pass + + +class InvalidIssuerError(InvalidTokenError): + pass + + +class InvalidIssuedAtError(InvalidTokenError): + pass + + +class ImmatureSignatureError(InvalidTokenError): + pass + + +class InvalidKeyError(PyJWTError): + pass + + +class InvalidAlgorithmError(InvalidTokenError): + pass + + +class MissingRequiredClaimError(InvalidTokenError): + + def __init__(self, claim): + self.claim = claim + + def __str__(self): + return 'Token is missing the "%s" claim' % self.claim + + +ExpiredSignature = ExpiredSignatureError +InvalidAudience = InvalidAudienceError +InvalidIssuer = InvalidIssuerError diff --git a/APPS_UNCOMPILED/lib/jwt/help.py b/APPS_UNCOMPILED/lib/jwt/help.py new file mode 100644 index 0000000..67900f8 --- /dev/null +++ b/APPS_UNCOMPILED/lib/jwt/help.py @@ -0,0 +1,59 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/jwt/help.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 1670 bytes +from __future__ import print_function +import json, platform, sys +from . 
try:
    import cryptography
except ImportError:
    cryptography = None

try:
    import ecdsa
except ImportError:
    ecdsa = None


def info():
    """Generate information for a bug report.

    Based on the requests package help utility module.  Returns a dict
    describing the platform, the interpreter, and the cryptography and
    PyJWT versions.
    """
    try:
        platform_info = {
            "system": platform.system(),
            "release": platform.release(),
        }
    except IOError:
        platform_info = {"system": "Unknown", "release": "Unknown"}
    implementation = platform.python_implementation()
    # BUGFIX(decompile): restore the CPython / PyPy / other selection as a
    # flat elif ladder -- the decompiled nesting left the "Unknown"
    # fallback's attachment ambiguous.
    if implementation == "CPython":
        implementation_version = platform.python_version()
    elif implementation == "PyPy":
        implementation_version = "%s.%s.%s" % (
            sys.pypy_version_info.major,
            sys.pypy_version_info.minor,
            sys.pypy_version_info.micro,
        )
        if sys.pypy_version_info.releaselevel != "final":
            implementation_version = "".join(
                [implementation_version, sys.pypy_version_info.releaselevel])
    else:
        implementation_version = "Unknown"
    return {
        "platform": platform_info,
        "implementation": {
            "name": implementation,
            "version": implementation_version,
        },
        "cryptography": {"version": getattr(cryptography, "__version__", "")},
        "pyjwt": {"version": pyjwt_version},
    }


def main():
    """Pretty-print the bug information as JSON."""
    print(json.dumps(info(), sort_keys=True, indent=2))


if __name__ == "__main__":
    main()
def force_unicode(value):
    """Return *value* as text; UTF-8-decode it when it arrives as bytes."""
    if isinstance(value, binary_type):
        return value.decode("utf-8")
    elif isinstance(value, text_type):
        return value
    raise TypeError("Expected a string value")


def force_bytes(value):
    """Return *value* as bytes; UTF-8-encode it when it arrives as text."""
    if isinstance(value, text_type):
        return value.encode("utf-8")
    elif isinstance(value, binary_type):
        return value
    raise TypeError("Expected a string value")


def base64url_decode(input):
    """Decode a base64url string, restoring any stripped '=' padding."""
    if isinstance(input, text_type):
        input = input.encode("ascii")
    remainder = len(input) % 4
    if remainder:
        input += b'=' * (4 - remainder)
    return base64.urlsafe_b64decode(input)


def base64url_encode(input):
    """Encode bytes as unpadded base64url."""
    return base64.urlsafe_b64encode(input).replace(b'=', b'')


def to_base64url_uint(val):
    """Encode a non-negative integer as unpadded base64url big-endian bytes."""
    if val < 0:
        raise ValueError("Must be a positive integer")
    # Zero encodes as a single zero byte rather than an empty string.
    int_bytes = bytes_from_int(val) or b'\x00'
    return base64url_encode(int_bytes)


def from_base64url_uint(val):
    """Decode an unpadded base64url string into a non-negative integer."""
    if isinstance(val, text_type):
        val = val.encode("ascii")
    decoded = base64url_decode(val)
    return int(binascii.b2a_hex(decoded), 16)


def merge_dict(original, updates):
    """Return a copy of *original* with *updates* applied (no-op when empty)."""
    if not updates:
        return original
    try:
        merged_options = original.copy()
        merged_options.update(updates)
    except (AttributeError, ValueError) as err:
        raise TypeError("original and updates must be a dictionary: %s" % err)
    return merged_options


def number_to_bytes(num, num_bytes):
    """Serialize *num* as a fixed-width big-endian byte string."""
    padded_hex = "%0*x" % (2 * num_bytes, num)
    return binascii.a2b_hex(padded_hex.encode("ascii"))


def bytes_to_number(string):
    """Deserialize a big-endian byte string into an integer."""
    return int(binascii.b2a_hex(string), 16)


def der_to_raw_signature(der_sig, curve):
    """Convert a DER-encoded ECDSA signature into raw ``r || s`` form."""
    byte_len = (curve.key_size + 7) // 8
    r, s = decode_dss_signature(der_sig)
    return number_to_bytes(r, byte_len) + number_to_bytes(s, byte_len)
def raw_to_der_signature(raw_sig, curve):
    """Convert a raw ``r || s`` ECDSA signature into DER encoding.

    ``raw_sig`` must be exactly ``2 * num_bytes`` long, where ``num_bytes``
    is the byte length of the curve order; the first half is ``r`` and the
    second half is ``s``.

    Raises:
        ValueError: if ``raw_sig`` has the wrong length for *curve*.
    """
    num_bits = curve.key_size
    num_bytes = (num_bits + 7) // 8
    if len(raw_sig) != 2 * num_bytes:
        raise ValueError("Invalid signature")
    # BUGFIX(decompile): the original indexed ``raw_sig[None[:num_bytes]]``
    # and ``raw_sig[num_bytes[:None]]`` -- a TypeError at runtime; the
    # intended slices are the r and s halves of the signature.
    r = bytes_to_number(raw_sig[:num_bytes])
    s = bytes_to_number(raw_sig[num_bytes:])
    return encode_dss_signature(r, s)
+ __slots__ = "_callback" + + def __init__(self, base, signum, callback, userdata=None): + if not callable(callback): + raise TypeError("the callback must be callable") + + def _fire(evt, fd, what, userdata, selfref=weakref.ref(self)): + """Special internal class to prevent circular references.""" + self = selfref() + if self is not None: + self._callback(self, fd, userdata) + + super(Signal, self).__init__(base, signum, EV_SIGNAL | EV_PERSIST, _fire, userdata) + self._callback = callback diff --git a/APPS_UNCOMPILED/lib/linkkit/__init__.py b/APPS_UNCOMPILED/lib/linkkit/__init__.py new file mode 100644 index 0000000..ad91ad1 --- /dev/null +++ b/APPS_UNCOMPILED/lib/linkkit/__init__.py @@ -0,0 +1,8 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/linkkit/__init__.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 16 bytes +name = "linkkit" diff --git a/APPS_UNCOMPILED/lib/linkkit/h2client.py b/APPS_UNCOMPILED/lib/linkkit/h2client.py new file mode 100644 index 0000000..24eb949 --- /dev/null +++ b/APPS_UNCOMPILED/lib/linkkit/h2client.py @@ -0,0 +1,496 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/linkkit/h2client.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 19761 bytes +import hyper, time, hmac, ssl, logging, os, threading, crcmod, concurrent.futures, hashlib + +def _assert_value(condition, error_msg): + if not condition: + raise ValueError(error_msg) + + +_H2_OPT_HEART_BEAT_TIME_DEFAULT = 25 +_H2_OPT_PORT_DEFAULT = 443 +_H2_MAX_FILE_SIZE = 1073741824 + +def h2_set_option(opt, value): + global _H2_MAX_FILE_SIZE + global 
class StreamHandler:
    """Interface for chunked upload sources consumed by H2Stream.send()."""

    def __init__(self):
        pass

    def __enter__(self):
        pass

    def __exit__(self, type, value, trace):
        pass

    def get_content_length(self):
        """Total number of bytes this handler will produce, if known."""
        pass

    def next(self):
        """Return the next chunk of data, or None when exhausted."""
        pass

    def has_next(self):
        """Return whether more chunks remain."""
        return False


class FileStreamHandler(StreamHandler):
    """StreamHandler that reads a local file in fixed-size blocks.

    When ``opt_crc64`` is set, a running CRC64-ECMA checksum of all bytes
    read so far is maintained (requires the ``crcmod`` package).
    """

    def __init__(self, filename, block_size=524288, opt_crc64=False):
        self.__filename = filename
        self.__block_size = block_size
        self.__size = os.stat(filename).st_size
        self.__opt_crc64 = opt_crc64
        self.__last_crc = 0
        self.__read_size = 0

    def get_content_length(self):
        return self.__size

    def __enter__(self):
        logging.debug("open the file, filename:%s" % self.__filename)
        self.__f = open(self.__filename, "rb")
        self.__read_size = 0

    def __exit__(self, type, value, trace):
        if self.__f:
            self.__f.close()
            self.__f = None

    def next(self):
        # Returns None once the whole file has been consumed.
        if not self.__f or self.__read_size >= self.__size:
            return
        data = self.__f.read(self.__block_size)
        if data:
            self.__read_size += len(data)
            if self.__opt_crc64:
                # BUGFIX(decompile): the decompiled source carried Python 2
                # long literals ("...L"), a SyntaxError on Python 3.  The
                # values are the CRC64-ECMA polynomial (0x142F0E1EBA9EA3693)
                # and the 64-bit xorOut mask.
                do_crc64 = crcmod.mkCrcFun(
                    0x142F0E1EBA9EA3693,
                    initCrc=self.__last_crc,
                    xorOut=0xFFFFFFFFFFFFFFFF,
                    rev=True)
                self.__last_crc = do_crc64(data)
        return data

    def has_next(self):
        return self.__f.tell() < self.__size

    def get_crc64(self):
        """Running CRC64 of the bytes read so far (0 if opt_crc64 is off)."""
        return self.__last_crc

    def get_read_size(self):
        """Number of bytes delivered since __enter__()."""
        return self.__read_size
class H2Exception(Exception):
    """Error raised for failed HTTP/2 requests, carrying the status code."""

    def __init__(self, code, msg):
        Exception.__init__(self, msg)
        self.__code = code
        self.__msg = msg

    def get_code(self):
        return self.__code

    def get_msg(self):
        return self.__msg

    def __name__(self):
        return "H2Exception"


class UploadFileInfo:
    """Descriptor for one upload: local path, remote name, overwrite flag."""

    def __init__(self, local_filename, remote_filename=None, overwrite=True):
        self.local_filename = local_filename
        self.opt_overwrite = overwrite
        # Default the remote name to the local file's basename.
        if remote_filename:
            self.remote_filename = remote_filename
        else:
            self.remote_filename = os.path.basename(local_filename)

    def __name__(self):
        return "UploadFileInfo"


class UploadFileResult:
    """Outcome of one upload: status code, sizes, store id, and exception."""

    def __init__(self, code=None, exception=None, upload_size=None,
                 total_size=None, file_store_id=None):
        self.upload_size = upload_size
        self.total_size = total_size
        self.file_store_id = file_store_id
        self.code = code
        self.exception = exception

    def __name__(self):
        return "UploadFileResult"


class H2FileUploadSink:
    """Callback interface notified about the upload lifecycle; override
    the hooks you care about."""

    def on_file_upload_start(self, id, upload_file_info, user_data):
        pass

    def on_file_upload_end(self, id, upload_file_info, upload_file_result, user_data):
        pass

    def on_file_upload_progress(self, id, upload_file_info, upload_file_result, user_data):
        pass
+ def __name__(self): + return "H2FileTask" + + +class H2Stream: + + def __init__(self, client, id): + self._H2Stream__client = client + self._H2Stream__conn = None + self._H2Stream__total_sent_size = 0 + self._H2Stream__path = None + self._H2Stream__id = id + self._H2Stream__stream_id = None + self._H2Stream__x_request_id = None + self._H2Stream__x_data_stream_id = None + + def __name__(self): + return "H2Stream" + + def get_id(self): + return self._H2Stream__id + + def open(self, path, header): + _assert_value(path, "path is required") + with self._H2Stream__client._get_auth_lock(): + url = "/stream/open" + path + self._H2Stream__conn = self._H2Stream__client.get_connect() + self._H2Stream__total_sent_size = 0 + self._H2Stream__path = path + logging.debug("request url: %s" % url) + conn_header = self._H2Stream__client.get_default_header() + if header: + conn_header.update(header) + req_id = self._H2Stream__conn.request("GET", url, None, conn_header) + response = self._H2Stream__conn.get_response(req_id) + self._H2Stream__check_response(response) + self._H2Stream__x_request_id = response.headers["x-request-id"][0] + self._H2Stream__x_data_stream_id = response.headers["x-data-stream-id"][0] + logging.debug("x_request_id: %s" % self._H2Stream__x_request_id) + logging.debug("x_data_stream_id: %s" % self._H2Stream__x_data_stream_id) + return response + + def close(self, header): + logging.debug("close the stream") + final_header = {'x-request-id':self._H2Stream__x_request_id, 'x-data-stream-id':self._H2Stream__x_data_stream_id} + final_header.update(header) + req_id = self._H2Stream__conn.request("GET", "/stream/close/" + self._H2Stream__path, None, final_header) + response = self._H2Stream__conn.get_response(req_id) + self._H2Stream__check_response(response) + return response + + def send(self, headers, data_handler): + with self._H2Stream__client._get_auth_lock(): + url = "/stream/send" + self._H2Stream__path + logging.debug("request url: %s" % url) + 
class H2Client:
    """HTTP/2 client for the Aliyun IoT HTTP/2 (file-upload) gateway.

    Owns a single ``hyper.HTTP20Connection`` shared by all ``H2Stream``
    objects, signs requests with the device credentials, keeps the
    connection alive with PING heart-beats while streams are active, and
    runs file uploads on a thread pool.
    """

    def __init__(self, region, product_key, device_name, device_secret,
                 client_id=None, opt_max_thread_num=4, endpoint=None):
        """Validate the device triple and prepare (but do not open) the client."""
        _assert_value(region, "region is not empty")
        _assert_value(product_key, "product_key is not empty")
        _assert_value(device_name, "device_name is not empty")
        self.__product_key = product_key
        self.__device_name = device_name
        self.__client_id = client_id
        self.__device_secret = device_secret
        self.__region = region
        self.__endpoint = endpoint
        self.__opt_free_idle_connect = False
        self.__connected = False
        self.__port = _H2_OPT_PORT_DEFAULT
        self.__conn = None
        self.__opt_heart_beat_time = _H2_OPT_HEART_BEAT_TIME_DEFAULT
        self.__conn_lock = threading.RLock()
        self.__lock = threading.RLock()
        self.__stream_list = []
        self.__stream_list_lock = threading.RLock()
        self.__thread_executor = concurrent.futures.ThreadPoolExecutor(
            max_workers=opt_max_thread_num)
        self.__auth_lock = threading.RLock()
        self.__id = 0
        self.__heart_beat_lock = threading.RLock()
        self.__timer = None

    def get_endpoint(self):
        """Return the user-supplied endpoint override (may be None)."""
        return self.__endpoint

    def get_actual_endpoint(self):
        """Return the endpoint that will actually be dialed."""
        return self.__generate_endpoint()

    def __generate_endpoint(self):
        # An explicit override wins; otherwise derive the regional default.
        if self.__endpoint:
            return self.__endpoint
        return self.__product_key + ".iot-as-http2.%s.aliyuncs.com" % self.__region

    def open(self):
        """Open the HTTP/2 connection; returns -1 if already open, else 0."""
        with self.__conn_lock:
            if self.__conn:
                logging.info("the client is opened")
                return -1
            return self.__connect()

    def close(self):
        """Close the connection and release all streams and the heart-beat."""
        with self.__conn_lock:
            # BUGFIX: the decompiled original returned before the stream
            # cleanup call, leaving it unreachable.
            result = self.__close_connect()
            self.__close_all_streams()
            return result

    def upload_file_async(self, local_filename, remote_filename=None, over_write=True,
                          upload_file_sink=None, upload_sink_user_data=None):
        """Queue a file upload on the thread pool; returns an H2FileTask."""
        _assert_value(local_filename, "local_filename is required")
        self.__check_file(local_filename)
        file_info = UploadFileInfo(local_filename, remote_filename, over_write)
        future_result = self.__thread_executor.submit(
            self.__post_file_task, file_info, upload_file_sink, upload_sink_user_data)
        # BUGFIX: the original passed the *builtin* ``id`` function as the
        # task id; allocate a real per-client id instead.
        return H2FileTask(self.__create_stream_id(), file_info, future_result)

    def upload_file_sync(self, local_filename, remote_filename=None, over_write=True,
                         timeout=None, upload_file_sink=None, upload_sink_user_data=None):
        """Upload a file and block until it finishes (or *timeout* elapses)."""
        self.__check_file(local_filename)
        task = self.upload_file_async(local_filename, remote_filename, over_write,
                                      upload_file_sink, upload_sink_user_data)
        return task.result(timeout)

    def __create_stream_id(self):
        # Monotonically increasing id shared by streams and upload tasks.
        with self.__lock:
            self.__id += 1
            return self.__id

    def new_stream(self):
        """Create a new logical stream bound to this client."""
        return H2Stream(self, self.__create_stream_id())

    def _get_auth_lock(self):
        # Streams serialize authenticated requests through this lock.
        return self.__auth_lock

    def __crc_equal(self, value1, value2):
        # Compare CRCs regardless of signed/unsigned 64-bit representation.
        if value1 == value2:
            return True
        return self.__to_unsign(value1) == self.__to_unsign(value2)

    def __to_unsign(self, value):
        # BUGFIX: Python 2 long literal ("...L") removed; additionally 0 is
        # already unsigned (the original ``value > 0`` mapped 0 to 2**64).
        if value >= 0:
            return value
        return 18446744073709551616 + value

    def __check_file(self, path):
        # Reject files at or above the configured upload cap.
        stat_info = os.stat(path)
        if stat_info.st_size >= _H2_MAX_FILE_SIZE:
            raise ValueError("maximum file size exceeded")

    def __post_file_task(self, file_info, sink=None, user_data=None):
        """Worker: open the upload stream, send the file, verify CRC64, close."""
        local_filename = file_info.local_filename
        remote_filename = file_info.remote_filename
        over_write = file_info.opt_overwrite
        fs = None
        file_store_id = None
        exception = None
        code = 0
        x_file_upload_id = None
        stream = self.new_stream()
        self.__on_new_stream(stream)
        try:
            try:
                logging.info("start to post file, local_filename:%s, remote:%s, over_write:%d" % (
                    local_filename, remote_filename, over_write))
                if sink:
                    sink.on_file_upload_start(stream.get_id(), file_info, user_data)
                header = {'x-file-name': remote_filename,
                          'x-file-overwrite': "1" if over_write else "0"}
                response = stream.open("/c/iot/sys/thing/file/upload", header)
                x_file_upload_id = response.headers["x-file-upload-id"][0]
                header = {"x-file-upload-id": x_file_upload_id}
                fs = FileStreamHandler(local_filename, opt_crc64=True)
                stream.send(header, fs)
                response = stream.close(header)
                remote_crc64 = int(response.headers["x-file-crc64ecma"][0])
                logging.info("crc64, local:%ld, remote:%ld" % (fs.get_crc64(), remote_crc64))
                if not self.__crc_equal(fs.get_crc64(), remote_crc64):
                    raise Exception("fail to check crc64, local:%ld, remote:%ld" % (
                        fs.get_crc64(), remote_crc64))
                file_store_id = response.headers["x-file-store-id"][0]
                logging.info("finish uploading file, local_filename:%s, remote:%s, over_write:%d, file_store_id:%s" % (
                    local_filename, remote_filename, over_write, file_store_id))
                # BUGFIX: get_content_length was passed unbound (missing
                # call parentheses) in the success-path result.
                return UploadFileResult(code, exception, fs.get_read_size(),
                                        fs.get_content_length(), file_store_id)
            except H2Exception as e:
                logging.error("fail to upload the file, local_filename:%s, remote:%s, over_write:%d, x_file_upload_id:%s, stream:%s, code:%s, error:%s" % (
                    local_filename, remote_filename, over_write, x_file_upload_id, stream, e.get_code(), e))
                return UploadFileResult(e.get_code(), exception,
                                        fs.get_read_size() if fs else -1,
                                        fs.get_content_length() if fs else -1,
                                        file_store_id)
            except Exception as e:
                logging.error("fail to upload the file, local_filename:%s, remote:%s, over_write:%d, x_file_upload_id:%s, stream:%s, error:%s" % (
                    local_filename, remote_filename, over_write, x_file_upload_id, stream, e))
                return UploadFileResult(-1, exception,
                                        fs.get_read_size() if fs else -1,
                                        fs.get_content_length() if fs else -1,
                                        file_store_id)
        finally:
            self.__on_free_stream(stream)
            if sink:
                result = UploadFileResult(code, exception,
                                          fs.get_read_size() if fs else -1,
                                          fs.get_content_length() if fs else -1,
                                          file_store_id)
                sink.on_file_upload_end(stream.get_id(), file_info, result, user_data)

    def __connect(self):
        with self.__conn_lock:
            ctx = ssl.SSLContext(ssl.PROTOCOL_TLS)
            h2_endpoint = self.__generate_endpoint()
            logging.debug("http/2 endpoint:%s" % h2_endpoint)
            self.__conn = hyper.HTTP20Connection(
                h2_endpoint, port=self.__port,
                force_proto=hyper.tls.NPN_PROTOCOL, ssl_context=ctx)
            return 0

    def get_connect(self):
        """Return the shared connection, dialing it on first use."""
        with self.__conn_lock:
            # BUGFIX: the original returned __connect()'s status code (0)
            # instead of the connection object on first use.
            if not self.__conn:
                self.__connect()
            return self.__conn

    def __fill_auth_header(self, header):
        # Sign clientId/deviceName/productKey/timestamp with HMAC-SHA256.
        client_id = self.__client_id or self.__device_name
        timestamp = str(int(time.time() * 1000))
        sign_content = ("clientId" + client_id
                        + "deviceName" + self.__device_name
                        + "productKey" + self.__product_key
                        + "timestamp" + timestamp)
        sign = hmac.new(self.__device_secret.encode("utf-8"),
                        sign_content.encode("utf-8"), hashlib.sha256).hexdigest()
        header["x-auth-param-timestamp"] = timestamp
        header["x-auth-param-signmethod"] = "hmacsha256"
        header["x-auth-param-sign"] = sign
        header["x-auth-param-product-key"] = self.__product_key
        header["x-auth-param-device-name"] = self.__device_name
        header["x-auth-param-client-id"] = client_id
        header["x-auth-name"] = "devicename"
        return header

    def __fill_sdk_header(self, header):
        header["x-sdk-version"] = "1.2.0"
        header["x-sdk-version-name"] = "1.2.0"
        header["x-sdk-platform"] = "python"
        return header

    def get_default_header(self):
        """Build the standard auth + SDK identification headers."""
        header = {}
        self.__fill_auth_header(header)
        self.__fill_sdk_header(header)
        return header

    def __close_connect(self):
        # NOTE(review): __conn is not reset to None here, so open() after
        # close() still reports "already open" -- preserved from the
        # original; confirm before changing.
        with self.__conn_lock:
            if self.__conn:
                self.__conn.close(0)
            return 0

    def __close_all_streams(self):
        with self.__stream_list_lock:
            # BUGFIX: keep an (empty) list rather than setting it to None,
            # which would crash any later stream registration.
            self.__stream_list.clear()
            self.__stop_heart_beat()

    def __on_new_stream(self, stream):
        with self.__stream_list_lock:
            self.__stream_list.append(stream)
            # First active stream switches the heart-beat on.
            if len(self.__stream_list) == 1:
                self.__start_heart_beat()

    def __on_free_stream(self, stream):
        with self.__stream_list_lock:
            self.__stream_list.remove(stream)
            # Last active stream switches the heart-beat off.
            if len(self.__stream_list) == 0:
                self.__stop_heart_beat()

    def __start_heart_beat(self):
        logging.debug("start heart_beat")
        self.__schedule_heart_beat()

    def __handle_heart_beat(self):
        # Send a PING and re-arm the timer for the next interval.
        logging.debug("heart...")
        self.__conn.ping(b'PINGPONG')
        self.__schedule_heart_beat()

    def __stop_heart_beat(self):
        logging.debug("stop heart")
        self.__cancel_heart_beat()

    def __schedule_heart_beat(self):
        with self.__heart_beat_lock:
            if self.__opt_heart_beat_time and self.__opt_heart_beat_time > 0:
                self.__timer = threading.Timer(self.__opt_heart_beat_time,
                                               self.__handle_heart_beat)
                self.__timer.start()

    def __cancel_heart_beat(self):
        with self.__heart_beat_lock:
            if self.__timer:
                self.__timer.cancel()
                self.__timer = None
minor_version, sys.version)) + + +lk_check_python_version(REQUIRED_MAJOR_VERSION, REQUIRED_MINOR_VERSION) + +class LinkKit(object): + TAG_KEY = "attrKey" + TAG_VALUE = "attrValue" + + class LinkKitState(Enum): + INITIALIZED = 1 + CONNECTING = 2 + CONNECTED = 3 + DISCONNECTING = 4 + DISCONNECTED = 5 + DESTRUCTING = 6 + DESTRUCTED = 7 + + class StateError(Exception): + + def __init__(self, err): + Exception.__init__(self, err) + + class Shadow(object): + + def __init__(self): + self._Shadow__version = None + self._Shadow__timestamp = None + self._Shadow__state = None + self._Shadow__metadata = None + self._Shadow__latest_shadow_lock = threading.Lock() + self._Shadow__latest_received_time = None + self._Shadow__lastest_received_payload = None + + def get_version(self): + with self._Shadow__latest_shadow_lock: + return self._Shadow__version + + def get_metadata(self): + with self._Shadow__latest_shadow_lock: + return self._Shadow__metadata + + def get_state(self): + with self._Shadow__latest_shadow_lock: + return self._Shadow__state + + def set_state(self, state): + with self._Shadow__latest_shadow_lock: + self._Shadow__state = state + + def set_metadata(self, metadata): + with self._Shadow__latest_shadow_lock: + self._Shadow__metadata = metadata + + def set_version(self, version): + with self._Shadow__latest_shadow_lock: + self._Shadow__version = version + + def set_timestamp(self, timestamp): + with self._Shadow__latest_shadow_lock: + self._Shadow__timestamp = timestamp + + def set_latest_recevied_time(self, timestamp): + with self._Shadow__latest_shadow_lock: + self._Shadow__latest_received_time = timestamp + + def get_latest_recevied_time(self): + with self._Shadow__latest_shadow_lock: + return self._Shadow__latest_received_time + + def set_latest_recevied_payload(self, payload): + with self._Shadow__latest_shadow_lock: + self._Shadow__latest_received_payload = payload + + def get_latest_recevied_payload(self): + with self._Shadow__latest_shadow_lock: + return 
self._Shadow__latest_received_payload + + def to_dict(self): + return {'state':self._Shadow__state, 'metadata':self._Shadow__metadata, 'version':self._Shadow__version, 'timestamp':self._Shadow__timestamp} + + def to_json_string(self): + return json.dumps(self.to_dict()) + + class __HandlerTask(object): + + def __init__(self, logger=None): + self._HandlerTask__logger = logger + if self._HandlerTask__logger is not None: + self._HandlerTask__logger.info("HandlerTask init enter") + self._HandlerTask__message_queue = queue.Queue(20) + self._HandlerTask__cmd_callback = {} + self._HandlerTask__started = False + self._HandlerTask__exited = False + self._HandlerTask__thread = None + + def register_cmd_callback(self, cmd, callback): + if self._HandlerTask__started is False: + if cmd != "req_exit": + self._HandlerTask__cmd_callback[cmd] = callback + return 0 + return 1 + else: + return 2 + + def post_message(self, cmd, value): + self._HandlerTask__logger.debug("post_message :%r " % cmd) + if self._HandlerTask__started: + if self._HandlerTask__exited is False: + try: + self._HandlerTask__message_queue.put((cmd, value), timeout=5) + except queue.Full as e: + try: + self._HandlerTask__logger.error("queue full: %r" % e) + return False + finally: + e = None + del e + + self._HandlerTask__logger.debug("post_message success") + return True + self._HandlerTask__logger.debug("post_message fail started:%r,exited:%r" % (self._HandlerTask__started, self._HandlerTask__exited)) + return False + + def start(self): + if self._HandlerTask__logger is not None: + self._HandlerTask__logger.info("HandlerTask start") + if self._HandlerTask__started is False: + if self._HandlerTask__logger is not None: + self._HandlerTask__logger.info("HandlerTask try start") + self._HandlerTask__exited = False + self._HandlerTask__started = True + self._HandlerTask__message_queue = queue.Queue(20) + self._HandlerTask__thread = threading.Thread(target=(self._HandlerTask__thread_runnable)) + 
self._HandlerTask__thread.daemon = True + self._HandlerTask__thread.start() + return 0 + return 1 + + def stop(self): + if self._HandlerTask__started: + if self._HandlerTask__exited is False: + self._HandlerTask__exited = True + self._HandlerTask__message_queue.put(('req_exit', None)) + + def wait_stop(self): + if self._HandlerTask__started is True: + self._HandlerTask__thread.join() + + def __thread_runnable(self): + if self._HandlerTask__logger is not None: + self._HandlerTask__logger.debug("thread runnable enter") + while 1: + cmd, value = self._HandlerTask__message_queue.get() + self._HandlerTask__logger.debug("thread runnable pop cmd:%r" % cmd) + if cmd == "req_exit": + break + if self._HandlerTask__cmd_callback[cmd] is not None: + try: + self._HandlerTask__cmd_callback[cmd](value) + except Exception as e: + try: + if self._HandlerTask__logger is not None: + self._HandlerTask__logger.error("thread runnable raise exception:%s" % e) + finally: + e = None + del e + + self._HandlerTask__started = False + if self._HandlerTask__logger is not None: + self._HandlerTask__logger.debug("thread runnable exit") + + class LoopThread(object): + + def __init__(self, logger=None): + self._LoopThread__logger = logger + if logger is not None: + self._LoopThread__logger.info("LoopThread init enter") + self._LoopThread__callback = None + self._LoopThread__thread = None + self._LoopThread__started = False + self._LoopThread__req_wait = threading.Event() + if logger is not None: + self._LoopThread__logger.info("LoopThread init exit") + + def start(self, callback): + if self._LoopThread__started is True: + self._LoopThread__logger.info("LoopThread already ") + return 1 + self._LoopThread__callback = callback + self._LoopThread__thread = threading.Thread(target=(self._LoopThread__thread_main)) + self._LoopThread__thread.daemon = True + self._LoopThread__thread.start() + return 0 + + def stop(self): + self._LoopThread__req_wait.wait() + self._LoopThread__req_wait.clear() + + def 
__thread_main(self): + self._LoopThread__started = True + try: + if self._LoopThread__logger is not None: + self._LoopThread__logger.debug("LoopThread thread enter") + if self._LoopThread__callback is not None: + self._LoopThread__callback() + if self._LoopThread__logger is not None: + self._LoopThread__logger.debug("LoopThread thread exit") + except Exception as e: + try: + self._LoopThread__logger.error("LoopThread thread Exception:" + str(e)) + finally: + e = None + del e + + self._LoopThread__started = False + self._LoopThread__req_wait.set() + + class __H2FileUploadSink(h2client.H2FileUploadSink): + + def __init__(self, linkkit_instance): + self._H2FileUploadSink__lk_instance = linkkit_instance + + def on_file_upload_start(self, id, upload_file_info, user_data): + self._H2FileUploadSink__lk_instance._on_file_upload_start(id, upload_file_info, user_data) + + def on_file_upload_end(self, id, upload_file_info, upload_file_result, user_data): + self._H2FileUploadSink__lk_instance._on_file_upload_end(id, upload_file_info, upload_file_result, user_data) + + def on_file_upload_progress(self, id, upload_file_result, upload_file_info, user_data): + self._H2FileUploadSink__lk_instance._on_file_upload_progress(id, upload_file_result, upload_file_info, user_data) + + def _on_file_upload_start(self, id, upload_file_info, user_data): + if self._LinkKit__on_file_upload_begin != None: + self._LinkKit__on_file_upload_begin(id, upload_file_info, self._LinkKit__user_data) + + def _on_file_upload_end(self, id, upload_file_info, upload_file_result, user_data): + if self._LinkKit__on_file_upload_end != None: + self._LinkKit__on_file_upload_end(id, upload_file_info, upload_file_result, self._LinkKit__user_data) + + def _on_file_upload_progress(self, id, upload_file_result, upload_file_info, user_data): + pass + + class __LinkKitLog(object): + + def __init__(self): + self._LinkKitLog__logger = logging.getLogger("linkkit") + self._LinkKitLog__enabled = False + + def 
enable_logger(self): + self._LinkKitLog__enabled = True + + def disable_logger(self): + self._LinkKitLog__enabled = False + + def is_enabled(self): + return self._LinkKitLog__enabled + + def config_logger(self, level): + self._LinkKitLog__logger.setLevel(level) + + def debug(self, fmt, *args): + if self._LinkKitLog__enabled: + (self._LinkKitLog__logger.debug)(fmt, *args) + + def warring(self, fmt, *args): + if self._LinkKitLog__enabled: + (self._LinkKitLog__logger.warning)(fmt, *args) + + def info(self, fmt, *args): + if self._LinkKitLog__enabled: + (self._LinkKitLog__logger.info)(fmt, *args) + + def error(self, fmt, *args): + if self._LinkKitLog__enabled: + (self._LinkKitLog__logger.error)(fmt, *args) + + def critical(self, fmt, *args): + if self._LinkKitLog__enabled: + (self._LinkKitLog__logger.critical)(fmt, *args) + + _LinkKit__USER_TOPIC_PREFIX = "/%s/%s/%s" + _LinkKit__ALIYUN_BROKER_CA_DATA = "-----BEGIN CERTIFICATE-----\nMIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jvb3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAwMDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxTaWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZjc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavpxy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdGsnUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJU26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N89iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0BAQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOzyj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymPAbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF1
2KwvhHhm4qxFYxldBniYUr+WymXUadDKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbMEHMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==\n-----END CERTIFICATE-----" + + def __init__(self, host_name, product_key, device_name, device_secret, product_secret=None, user_data=None): + self._LinkKit__just_for_pycharm_autocomplete = False + + def __str_is_empty(value): + if value is None or value == "": + return True + return False + + if _LinkKit__str_is_empty(host_name): + raise ValueError("host_name wrong") + if _LinkKit__str_is_empty(product_key): + raise ValueError("product key wrong") + if _LinkKit__str_is_empty(device_name): + raise ValueError("device name wrong") + if _LinkKit__str_is_empty(device_secret): + if _LinkKit__str_is_empty(product_secret): + raise ValueError("device secret & product secret are both empty") + self._LinkKit__link_log = LinkKit._LinkKit__LinkKitLog() + self._LinkKit__PahoLog = logging.getLogger("Paho") + self._LinkKit__PahoLog.setLevel(logging.DEBUG) + self._LinkKit__host_name = host_name + self._LinkKit__product_key = product_key + self._LinkKit__device_name = device_name + self._LinkKit__device_secret = device_secret + self._LinkKit__product_secret = product_secret + self._LinkKit__user_data = user_data + self._LinkKit__device_interface_info = "" + self._LinkKit__device_mac = None + self._LinkKit__cellular_IMEI = None + self._LinkKit__cellular_ICCID = None + self._LinkKit__cellular_IMSI = None + self._LinkKit__cellular_MSISDN = None + self._LinkKit__mqtt_client = None + self._LinkKit__sdk_version = "1.2.0" + self._LinkKit__sdk_program_language = "Python" + self._LinkKit__endpoint = None + self._LinkKit__h2_endpoint = None + self._LinkKit__mqtt_port = 1883 + self._LinkKit__mqtt_protocol = "MQTTv311" + self._LinkKit__mqtt_transport = "TCP" + self._LinkKit__mqtt_secure = "TLS" + self._LinkKit__mqtt_keep_alive = 60 + self._LinkKit__mqtt_clean_session = True + self._LinkKit__mqtt_max_inflight_message = 20 + self._LinkKit__mqtt_max_queued_message = 
40 + self._LinkKit__mqtt_auto_reconnect_min_sec = 1 + self._LinkKit__mqtt_auto_reconnect_max_sec = 60 + self._LinkKit__mqtt_auto_reconnect_sec = 0 + self._LinkKit__mqtt_request_timeout = 10 + self._LinkKit__linkkit_state = LinkKit.LinkKitState.INITIALIZED + self._LinkKit__aliyun_broker_ca_data = self._LinkKit__ALIYUN_BROKER_CA_DATA + self._LinkKit__latest_shadow = LinkKit.Shadow() + self._LinkKit__on_device_dynamic_register = None + self._LinkKit__on_connect = None + self._LinkKit__on_disconnect = None + self._LinkKit__on_publish_topic = None + self._LinkKit__on_subscribe_topic = None + self._LinkKit__on_unsubscribe_topic = None + self._LinkKit__on_topic_message = None + self._LinkKit__on_topic_rrpc_message = None + self._LinkKit__on_subscribe_rrpc_topic = None + self._LinkKit__on_unsubscribe_rrpc_topic = None + self._LinkKit__on_thing_create = None + self._LinkKit__on_thing_enable = None + self._LinkKit__on_thing_disable = None + self._LinkKit__on_thing_raw_data_arrived = None + self._LinkKit__on_thing_raw_data_post = None + self._LinkKit__on_thing_call_service = None + self._LinkKit__on_thing_prop_changed = None + self._LinkKit__on_thing_event_post = None + self._LinkKit__on_thing_prop_post = None + self._LinkKit__on_thing_shadow_get = None + self._LinkKit__on_thing_device_info_update = None + self._LinkKit__on_thing_device_info_delete = None + self._LinkKit__on_file_upload_begin = None + self._LinkKit__on_file_upload_end = None + self._LinkKit__user_topics = {} + self._LinkKit__user_topics_subscribe_request = {} + self._LinkKit__user_topics_unsubscribe_request = {} + self._LinkKit__user_topics_request_lock = threading.Lock() + self._LinkKit__user_topics_unsubscribe_request_lock = threading.Lock() + self._LinkKit__user_rrpc_topics = {} + self._LinkKit__user_rrpc_topics_lock = threading.RLock() + self._LinkKit__user_rrpc_topics_subscribe_request = {} + self._LinkKit__user_rrpc_topics_unsubscribe_request = {} + self._LinkKit__user_rrpc_topics_subscribe_request_lock 
= threading.RLock() + self._LinkKit__user_rrpc_topics_unsubscribe_request_lock = threading.RLock() + self._LinkKit__user_rrpc_request_ids = [] + self._LinkKit__user_rrpc_request_id_index_map = {} + self._LinkKit__user_rrpc_request_ids_lock = threading.RLock() + self._LinkKit__user_rrpc_request_max_len = 100 + self._LinkKit__thing_topic_prop_post = "/sys/%s/%s/thing/event/property/post" % ( + self._LinkKit__product_key, self._LinkKit__device_name) + self._LinkKit__thing_topic_prop_post_reply = self._LinkKit__thing_topic_prop_post + "_reply" + self._LinkKit__thing_topic_prop_set = "/sys/%s/%s/thing/service/property/set" % ( + self._LinkKit__product_key, self._LinkKit__device_name) + self._LinkKit__thing_topic_prop_set_reply = self._LinkKit__thing_topic_prop_set + "_reply" + self._LinkKit__thing_topic_prop_get = "/sys/%s/%s/thing/service/property/get" % ( + self._LinkKit__product_key, self._LinkKit__device_name) + self._LinkKit__thing_topic_event_post_pattern = "/sys/%s/%s/thing/event/%s/post" + self._LinkKit__thing_prop_post_mid = {} + self._LinkKit__thing_prop_post_mid_lock = threading.Lock() + self._LinkKit__thing_prop_set_reply_mid = {} + self._LinkKit__thing_prop_set_reply_mid_lock = threading.Lock() + self._LinkKit__thing_topic_event_post = {} + self._LinkKit__thing_topic_event_post_reply = set() + self._LinkKit__thing_events = set() + self._LinkKit__thing_request_id_max = 1000000 + self._LinkKit__thing_request_value = 0 + self._LinkKit__thing_request_id = {} + self._LinkKit__thing_request_id_lock = threading.Lock() + self._LinkKit__thing_event_post_mid = {} + self._LinkKit__thing_event_post_mid_lock = threading.Lock() + self._LinkKit__thing_topic_shadow_get = "/shadow/get/%s/%s" % ( + self._LinkKit__product_key, self._LinkKit__device_name) + self._LinkKit__thing_topic_shadow_update = "/shadow/update/%s/%s" % ( + self._LinkKit__product_key, self._LinkKit__device_name) + self._LinkKit__thing_shadow_mid = {} + self._LinkKit__thing_shadow_mid_lock = 
threading.Lock() + self._LinkKit__thing_topic_service_pattern = "/sys/%s/%s/thing/service/%s" + self._LinkKit__thing_topic_services = set() + self._LinkKit__thing_topic_services_reply = set() + self._LinkKit__thing_services = set() + self._LinkKit__thing_answer_service_mid = {} + self._LinkKit__thing_answer_service_mid_lock = threading.Lock() + self._LinkKit__thing_topic_raw_up = "/sys/%s/%s/thing/model/up_raw" % (self._LinkKit__product_key, self._LinkKit__device_name) + self._LinkKit__thing_topic_raw_up_reply = self._LinkKit__thing_topic_raw_up + "_reply" + self._LinkKit__thing_topic_raw_down = "/sys/%s/%s/thing/model/down_raw" % (self._LinkKit__product_key, self._LinkKit__device_name) + self._LinkKit__thing_topic_raw_down_reply = self._LinkKit__thing_topic_raw_down + "_reply" + self._LinkKit__thing_raw_up_mid = {} + self._LinkKit__thing_raw_up_mid_lock = threading.Lock() + self._LinkKit__thing_raw_down_reply_mid = {} + self._LinkKit__thing_raw_down_reply_mid_lock = threading.Lock() + self._LinkKit__thing_topic_update_device_info_up = "/sys/%s/%s/thing/deviceinfo/update" % (self._LinkKit__product_key, self._LinkKit__device_name) + self._LinkKit__thing_topic_update_device_info_reply = self._LinkKit__thing_topic_update_device_info_up + "_reply" + self._LinkKit__thing_topic_delete_device_info_up = "/sys/%s/%s/thing/deviceinfo/delete" % (self._LinkKit__product_key, self._LinkKit__device_name) + self._LinkKit__thing_topic_delete_device_info_reply = self._LinkKit__thing_topic_delete_device_info_up + "_reply" + self._LinkKit__thing_update_device_info_up_mid = {} + self._LinkKit__thing_update_device_info_up_mid_lock = threading.Lock() + self._LinkKit__thing_delete_device_info_up_mid = {} + self._LinkKit__thing_delete_device_info_up_mid_lock = threading.Lock() + self._LinkKit__thing_properties_set = set() + self._LinkKit__thing_properties_get = set() + self._LinkKit__thing_properties_post = set() + self._LinkKit__thing_subscribe_sys_request = False + 
self._LinkKit__thing_subscribe_sys_request_mid = {} + self._LinkKit__thing_subscribe_sys_request_lock = threading.Lock() + self._LinkKit__thing_setup_state = False + self._LinkKit__thing_raw_only = False + self._LinkKit__thing_enable_state = False + if self._LinkKit__just_for_pycharm_autocomplete: + self._LinkKit__mqtt_client = mqtt.Client() + self._LinkKit__device_info_topic = "/sys/%s/%s/thing/deviceinfo/update" % (self._LinkKit__product_key, self._LinkKit__device_name) + self._LinkKit__device_info_topic_reply = self._LinkKit__device_info_topic + "_reply" + self._LinkKit__device_info_mid_lock = threading.Lock() + self._LinkKit__device_info_mid = {} + self._LinkKit__connect_async_req = False + self._LinkKit__worker_loop_exit_req = False + self._LinkKit__worker_loop_runing_state = False + self._LinkKit__worker_loop_exit_req_lock = threading.Lock() + self._LinkKit__loop_thread = LinkKit.LoopThread(self._LinkKit__link_log) + self._LinkKit__handler_task = LinkKit._LinkKit__HandlerTask(self._LinkKit__link_log) + self._LinkKit__handler_task_cmd_on_connect = "on_connect" + self._LinkKit__handler_task_cmd_on_disconnect = "on_disconnect" + self._LinkKit__handler_task_cmd_on_message = "on_message" + self._LinkKit__handler_task_cmd_on_publish = "on_publish" + self._LinkKit__handler_task_cmd_on_subscribe = "on_subscribe" + self._LinkKit__handler_task_cmd_on_unsubscribe = "on_unsubscribe" + self._LinkKit__handler_task.register_cmd_callback(self._LinkKit__handler_task_cmd_on_connect, self._LinkKit__handler_task_on_connect_callback) + self._LinkKit__handler_task.register_cmd_callback(self._LinkKit__handler_task_cmd_on_disconnect, self._LinkKit__handler_task_on_disconnect_callback) + self._LinkKit__handler_task.register_cmd_callback(self._LinkKit__handler_task_cmd_on_message, self._LinkKit__handler_task_on_message_callback) + self._LinkKit__handler_task.register_cmd_callback(self._LinkKit__handler_task_cmd_on_publish, self._LinkKit__handler_task_on_publish_callback) + 
self._LinkKit__handler_task.register_cmd_callback(self._LinkKit__handler_task_cmd_on_subscribe, self._LinkKit__handler_task_on_subscribe_callback) + self._LinkKit__handler_task.register_cmd_callback(self._LinkKit__handler_task_cmd_on_unsubscribe, self._LinkKit__handler_task_on_unsubscribe_callback) + self._LinkKit__handler_task.start() + self._LinkKit__h2_client = None + self._LinkKit__h2_client_lock = threading.RLock() + + @property + def on_device_dynamic_register(self): + pass + + @on_device_dynamic_register.setter + def on_device_dynamic_register(self, value): + self._LinkKit__on_device_dynamic_register = value + + @property + def on_connect(self): + return self._LinkKit__on_connect + + @on_connect.setter + def on_connect(self, value): + self._LinkKit__on_connect = value + + @property + def on_disconnect(self): + return self._LinkKit__on_disconnect + + @on_disconnect.setter + def on_disconnect(self, value): + self._LinkKit__on_disconnect = value + + @property + def on_publish_topic(self): + pass + + @on_publish_topic.setter + def on_publish_topic(self, value): + self._LinkKit__on_publish_topic = value + + @property + def on_subscribe_topic(self): + pass + + @on_subscribe_topic.setter + def on_subscribe_topic(self, value): + self._LinkKit__on_subscribe_topic = value + + @property + def on_unsubscribe_topic(self): + pass + + @on_unsubscribe_topic.setter + def on_unsubscribe_topic(self, value): + self._LinkKit__on_unsubscribe_topic = value + + @property + def on_topic_message(self): + pass + + @on_topic_message.setter + def on_topic_message(self, value): + self._LinkKit__on_topic_message = value + + @property + def on_topic_rrpc_message(self): + pass + + @on_topic_rrpc_message.setter + def on_topic_rrpc_message(self, value): + self._LinkKit__on_topic_rrpc_message = value + + @property + def on_thing_create(self): + pass + + @on_thing_create.setter + def on_thing_create(self, value): + self._LinkKit__on_thing_create = value + + @property + def 
on_thing_enable(self): + pass + + @on_thing_enable.setter + def on_thing_enable(self, value): + self._LinkKit__on_thing_enable = value + + @property + def on_thing_disable(self): + pass + + @on_thing_disable.setter + def on_thing_disable(self, value): + self._LinkKit__on_thing_disable = value + + @property + def on_thing_raw_data_arrived(self): + pass + + @on_thing_raw_data_arrived.setter + def on_thing_raw_data_arrived(self, value): + self._LinkKit__on_thing_raw_data_arrived = value + + @property + def on_thing_raw_data_post(self): + return self._LinkKit__on_thing_raw_data_post + + @property + def on_thing_device_info_update(self): + return self._LinkKit__on_thing_device_info_update + + @on_thing_device_info_update.setter + def on_thing_device_info_update(self, value): + self._LinkKit__on_thing_device_info_update = value + + @property + def on_thing_device_info_delete(self): + return self._LinkKit__on_thing_device_info_delete + + @on_thing_device_info_delete.setter + def on_thing_device_info_delete(self, value): + self._LinkKit__on_thing_device_info_delete = value + + @on_thing_raw_data_post.setter + def on_thing_raw_data_post(self, value): + self._LinkKit__on_thing_raw_data_post = value + + @property + def on_thing_call_service(self): + pass + + @on_thing_call_service.setter + def on_thing_call_service(self, value): + self._LinkKit__on_thing_call_service = value + + @property + def on_thing_prop_changed(self): + pass + + @on_thing_prop_changed.setter + def on_thing_prop_changed(self, value): + self._LinkKit__on_thing_prop_changed = value + + @property + def on_thing_event_post(self): + return self._LinkKit__on_thing_event_post + + @on_thing_event_post.setter + def on_thing_event_post(self, value): + self._LinkKit__on_thing_event_post = value + + @property + def on_thing_prop_post(self): + return self._LinkKit__on_thing_prop_post + + @on_thing_prop_post.setter + def on_thing_prop_post(self, value): + self._LinkKit__on_thing_prop_post = value + + @property + def 
on_thing_shadow_get(self): + return self._LinkKit__on_thing_shadow_get + + @on_thing_shadow_get.setter + def on_thing_shadow_get(self, value): + self._LinkKit__on_thing_shadow_get = value + + @property + def on_file_upload_begin(self): + return self._LinkKit__on_file_upload_begin + + @on_file_upload_begin.setter + def on_file_upload_begin(self, value): + self._LinkKit__on_file_upload_begin = value + + @property + def on_file_upload_end(self): + return self._LinkKit__on_file_upload_end + + @on_file_upload_end.setter + def on_file_upload_end(self, value): + self._LinkKit__on_file_upload_end = value + + def enable_logger(self, level): + self._LinkKit__link_log.config_logger(level) + self._LinkKit__link_log.enable_logger() + if self._LinkKit__mqtt_client is not None: + self._LinkKit__mqtt_client.enable_logger(self._LinkKit__PahoLog) + self._LinkKit__PahoLog.setLevel(level) + + def disable_logger(self): + self._LinkKit__link_log.disable_logger() + if self._LinkKit__mqtt_client is not None: + self._LinkKit__mqtt_client.disable_logger() + + def config_logger(self, level): + self._LinkKit__link_log.config_logger(level) + if self._LinkKit__mqtt_client is not None: + self._LinkKit__PahoLog.setLevel(level) + + def config_http2(self, endpoint=None): + if self._LinkKit__linkkit_state is not LinkKit.LinkKitState.INITIALIZED: + raise LinkKit.StateError("not in INITIALIZED state") + self._LinkKit__h2_endpoint = endpoint + + def config_mqtt(self, port=1883, protocol='MQTTv311', transport='TCP', secure='TLS', keep_alive=60, clean_session=True, max_inflight_message=20, max_queued_message=40, auto_reconnect_min_sec=1, auto_reconnect_max_sec=60, cadata=None, endpoint=None): + if self._LinkKit__linkkit_state is not LinkKit.LinkKitState.INITIALIZED: + raise LinkKit.StateError("not in INITIALIZED state") + else: + if port < 1 or port > 65535: + raise ValueError("port wrong") + elif protocol != "MQTTv311": + if protocol != "MQTTv31": + raise ValueError("protocol wrong") + if transport != 
"TCP": + raise ValueError("transport wrong") + if secure != "TLS" and secure != "": + raise ValueError("secure wrong") + if keep_alive < 60 or keep_alive > 180: + raise ValueError("keep_alive range wrong") + if clean_session is not True and clean_session is not False: + raise ValueError("clean session wrong") + if max_queued_message < 0: + raise ValueError("max_queued_message wrong") + if max_inflight_message < 0: + raise ValueError("max_inflight_message wrong") + if auto_reconnect_min_sec < 1 or auto_reconnect_min_sec > 7200: + raise ValueError("auto_reconnect_min_sec wrong") + if auto_reconnect_max_sec < 1 or auto_reconnect_max_sec > 7200: + raise ValueError("auto_reconnect_max_sec wrong") + if auto_reconnect_min_sec > auto_reconnect_max_sec: + raise ValueError("auto_reconnect_max_sec less than auto_reconnect_min_sec") + self._LinkKit__link_log.info("config_mqtt enter") + if self._LinkKit__linkkit_state == LinkKit.LinkKitState.INITIALIZED: + if port is not None: + self._LinkKit__mqtt_port = port + if protocol is not None: + self._LinkKit__mqtt_protocol = protocol + if transport is not None: + self._LinkKit__mqtt_transport = transport + if secure is not None: + self._LinkKit__mqtt_secure = secure + if keep_alive is not None: + self._LinkKit__mqtt_keep_alive = keep_alive + if clean_session is not None: + self._LinkKit__mqtt_clean_session = clean_session + if max_inflight_message is not None: + self._LinkKit__mqtt_max_inflight_message = max_inflight_message + if max_queued_message is not None: + self._LinkKit__mqtt_max_queued_message = max_queued_message + if auto_reconnect_min_sec is not None: + self._LinkKit__mqtt_auto_reconnect_min_sec = auto_reconnect_min_sec + if auto_reconnect_max_sec is not None: + self._LinkKit__mqtt_auto_reconnect_max_sec = auto_reconnect_max_sec + if cadata is not None: + self._LinkKit__aliyun_broker_ca_data = cadata + self._LinkKit__endpoint = endpoint + + def config_device_info(self, interface_info): + if self._LinkKit__linkkit_state is 
not LinkKit.LinkKitState.INITIALIZED: + raise LinkKit.StateError("LinkKit object not in INITIALIZED") + if not isinstance(interface_info, str): + raise ValueError("interface info must be string") + if len(interface_info) > 160: + return 1 + self._LinkKit__device_interface_info = interface_info + return 0 + + def get_product(self): + return self._LinkKit__product_key + + def get_device_name(self): + return self._LinkKit__device_name + + def get_endpoint(self): + return self._LinkKit__endpoint + + def get_h2_endpoint(self): + return self._LinkKit__h2_endpoint + + def get_actual_endpoint(self): + return self._LinkKit__generate_endpoint() + + def get_actual_h2_endpoint(self): + if self._LinkKit__h2_client: + return self.get_actual_endpoint() + return self._LinkKit__try_generate_custom_h2_endpoint() + + def __load_json(self, payload): + return json.loads(self._LinkKit__to_str(payload)) + + def __to_str(self, payload): + if type(payload) is bytes: + return str(payload, "utf-8") + return payload + + def __try_open_h2_client(self): + with self._LinkKit__h2_client_lock: + if not self._LinkKit__h2_client: + self._LinkKit__h2_client = h2client.H2Client((self._LinkKit__host_name), (self._LinkKit__product_key), + (self._LinkKit__device_name), + (self._LinkKit__device_secret), + endpoint=(self._LinkKit__try_generate_custom_h2_endpoint())) + self._LinkKit__h2_client.open() + + def __try_generate_custom_h2_endpoint(self): + if self._LinkKit__h2_endpoint: + return self._LinkKit__h2_endpoint + if self._LinkKit__endpoint: + if self._LinkKit__endpoint.find(".iot-as-mqtt.") > 0: + return self._LinkKit__endpoint.replace(".iot-as-mqtt.", ".iot-as-http2.") + return + + def __try_close_h2_client(self): + with self._LinkKit__h2_client_lock: + if self._LinkKit__h2_client: + self._LinkKit__h2_client.close() + self._LinkKit__h2_client = None + + def _get_h2_client(self): + self._LinkKit__try_open_h2_client() + return self._LinkKit__h2_client + + def upload_file_sync(self, local_filename, 
remote_filename=None, over_write=True, timeout=None): + """ + upload a file to the cloud and block the request + + Parameters + ---------- + local_filename : str + the path of local file + remote_filename: str, optional + the filename on the cloud + over_write: boolean, optional + if true, overwrite the file. The default value is True + timeout: int or float, optional + timeout can be an int or float. If timeout is not specified or None, there is no limit to the wait time. + + Returns + ---------- + UploadFileResult + upload_size, total_size, file_store_id, code, exception + code is 0 if success. + + Exceptions + ---------- + ValueError + Exception + + """ + sink = self._LinkKit__H2FileUploadSink(self) + self._LinkKit__try_open_h2_client() + return self._LinkKit__h2_client.upload_file_sync(local_filename, remote_filename, over_write, timeout, sink, None) + + def upload_file_async(self, local_filename, remote_filename=None, over_write=True): + sink = self._LinkKit__H2FileUploadSink(self) + self._LinkKit__try_open_h2_client() + return self._LinkKit__h2_client.upload_file_async(local_filename, remote_filename, over_write, sink, None) + + def __upload_device_interface_info(self): + request_id = self._LinkKit__get_thing_request_id() + payload = {'id':request_id, + 'version':"1.0", + 'params':[ + {'domain':"SYSTEM", + 'attrKey':"SYS_SDK_LANGUAGE", + 'attrValue':self._LinkKit__sdk_program_language}, + {'domain':"SYSTEM", + 'attrKey':"SYS_LP_SDK_VERSION", + 'attrValue':self._LinkKit__sdk_version}, + {'domain':"SYSTEM", + 'attrKey':"SYS_SDK_IF_INFO", + 'attrValue':self._LinkKit__device_interface_info}], + 'method':"thing.deviceinfo.update"} + with self._LinkKit__device_info_mid_lock: + rc, mid = self._LinkKit__mqtt_client.publish(self._LinkKit__device_info_topic, json.dumps(payload), 0) + if rc == mqtt.MQTT_ERR_SUCCESS: + self._LinkKit__device_info_mid[mid] = self._LinkKit__timestamp() + return 0 + return 1 + + def destruct(self): + self._LinkKit__try_close_h2_client() + if 
self._LinkKit__linkkit_state is LinkKit.LinkKitState.DESTRUCTED: + raise LinkKit.StateError("LinkKit object has already destructed") + else: + self._LinkKit__link_log.debug("destruct enter") + if self._LinkKit__linkkit_state == LinkKit.LinkKitState.CONNECTED or self._LinkKit__linkkit_state == LinkKit.LinkKitState.CONNECTING: + self._LinkKit__linkkit_state = LinkKit.LinkKitState.DESTRUCTING + if self._LinkKit__connect_async_req: + with self._LinkKit__worker_loop_exit_req_lock: + self._LinkKit__worker_loop_exit_req = True + if self._LinkKit__mqtt_client is not None: + self._LinkKit__mqtt_client.disconnect() + self._LinkKit__handler_task.wait_stop() + else: + self._LinkKit__linkkit_state = LinkKit.LinkKitState.DESTRUCTING + if self._LinkKit__connect_async_req: + with self._LinkKit__worker_loop_exit_req_lock: + self._LinkKit__worker_loop_exit_req = True + self._LinkKit__handler_task.stop() + self._LinkKit__handler_task.wait_stop() + self._LinkKit__linkkit_state = LinkKit.LinkKitState.DESTRUCTED + + def destroy(self): + self.destruct() + + def check_state(self): + return self._LinkKit__linkkit_state + + @staticmethod + def __generate_random_str(randomlength=16): + """ + generate radom string + """ + random_str = "" + for i in range(randomlength): + random_str += random.choice(string.digits + string.ascii_letters) + + return random_str + + def __dynamic_register_device(self): + pk = self._LinkKit__product_key + ps = self._LinkKit__product_secret + dn = self._LinkKit__device_name + random_str = self._LinkKit__generate_random_str(15) + context = ssl.create_default_context((ssl.Purpose.CLIENT_AUTH), cadata=(self._LinkKit__aliyun_broker_ca_data)) + sign_content = "deviceName%sproductKey%srandom%s" % (dn, pk, random_str) + sign = hmac.new(ps.encode("utf-8"), sign_content.encode("utf-8"), hashlib.sha256).hexdigest() + post_data = { + 'productKey': pk, + 'deviceName': dn, + 'random': random_str, + 'sign': sign, + 'signMethod': '"hmacsha256"'} + data = 
urllib.parse.urlencode(post_data) + data = data.encode("ascii") + request_url = "https://iot-auth.%s.aliyuncs.com/auth/register/device" % self._LinkKit__host_name + with urllib.request.urlopen(request_url, data, context=context) as f: + reply_data = f.read().decode("utf-8") + reply_obj = self._LinkKit__load_json(reply_data) + if reply_obj["code"] == 200: + reply_obj_data = reply_obj["data"] + if reply_obj_data is not None: + return ( + 0, reply_obj_data["deviceSecret"]) + else: + return ( + 1, reply_obj["message"]) + + def __config_mqtt_client_internal(self): + self._LinkKit__link_log.info("start connect") + timestamp = str(int(time.time())) + if self._LinkKit__mqtt_secure == "TLS": + securemode = 2 + else: + securemode = 3 + if self._LinkKit__device_interface_info: + sii_option = "sii=%s," % self._LinkKit__device_interface_info + else: + sii_option = "" + client_id = "%s&%s|securemode=%d,signmethod=hmacsha1,ext=1,lan=%s,_v=%s,%stimestamp=%s|" % ( + self._LinkKit__product_key, self._LinkKit__device_name, securemode, self._LinkKit__sdk_program_language, + self._LinkKit__sdk_version, sii_option, timestamp) + username = self._LinkKit__device_name + "&" + self._LinkKit__product_key + sign_content = "clientId%sdeviceName%sproductKey%stimestamp%s" % ( + self._LinkKit__product_key + "&" + self._LinkKit__device_name, + self._LinkKit__device_name, + self._LinkKit__product_key, + timestamp) + password = hmac.new(self._LinkKit__device_secret.encode("utf-8"), sign_content.encode("utf-8"), hashlib.sha1).hexdigest() + mqtt_protocol_version = mqtt.MQTTv311 + if self._LinkKit__mqtt_protocol == "MQTTv311": + mqtt_protocol_version = mqtt.MQTTv311 + else: + if self._LinkKit__mqtt_protocol == "MQTTv31": + mqtt_protocol_version = mqtt.MQTTv31 + self._LinkKit__mqtt_client = mqtt.Client(client_id=client_id, clean_session=(self._LinkKit__mqtt_clean_session), + protocol=mqtt_protocol_version) + if self._LinkKit__link_log.is_enabled(): + 
self._LinkKit__mqtt_client.enable_logger(self._LinkKit__PahoLog) + self._LinkKit__mqtt_client.username_pw_set(username, password) + self._LinkKit__mqtt_client.on_connect = self._LinkKit__on_internal_connect + self._LinkKit__mqtt_client.on_disconnect = self._LinkKit__on_internal_disconnect + self._LinkKit__mqtt_client.on_message = self._LinkKit__on_internal_message + self._LinkKit__mqtt_client.on_publish = self._LinkKit__on_internal_publish + self._LinkKit__mqtt_client.on_subscribe = self._LinkKit__on_internal_subscribe + self._LinkKit__mqtt_client.on_unsubscribe = self._LinkKit__on_internal_unsubscribe + self._LinkKit__mqtt_client.reconnect_delay_set(self._LinkKit__mqtt_auto_reconnect_min_sec, self._LinkKit__mqtt_auto_reconnect_max_sec) + self._LinkKit__mqtt_client.max_queued_messages_set(self._LinkKit__mqtt_max_queued_message) + self._LinkKit__mqtt_client.max_inflight_messages_set(self._LinkKit__mqtt_max_inflight_message) + self._LinkKit__link_log.debug("current working directory:" + os.getcwd()) + if self._LinkKit__mqtt_secure == "TLS": + context = ssl.create_default_context((ssl.Purpose.CLIENT_AUTH), cadata=(self._LinkKit__aliyun_broker_ca_data)) + self._LinkKit__mqtt_client.tls_set_context(context) + self._LinkKit__host_name_internal = self._LinkKit__generate_endpoint() + + def __generate_endpoint(self): + if self._LinkKit__endpoint: + return self._LinkKit__endpoint + if self._LinkKit__host_name == "127.0.0.1" or self._LinkKit__host_name == "localhost": + return self._LinkKit__host_name + return "%s.iot-as-mqtt.%s.aliyuncs.com" % ( + self._LinkKit__product_key, self._LinkKit__host_name) + + def connect(self): + if self._LinkKit__linkkit_state is not LinkKit.LinkKitState.INITIALIZED: + raise LinkKit.StateError("not in INITIALIZED state") + if self._LinkKit__device_secret is None or self._LinkKit__device_secret == "": + if not self._LinkKit__product_secret is None: + if self._LinkKit__product_secret == "": + raise ValueError("device Secret & product secret both 
empty") + rc, value = self._LinkKit__dynamic_register_device() + if self._LinkKit__on_device_dynamic_register is None: + raise Exception("user not give on_device_dynamic_register") + else: + try: + self._LinkKit__on_device_dynamic_register(rc, value, self._LinkKit__user_data) + if rc == 0: + self._LinkKit__device_secret = value + else: + self._LinkKit__link_log.error("dynamic register device fail:" + value) + return 1 + except Exception as e: + try: + self._LinkKit__link_log.error(e) + return 2 + finally: + e = None + del e + + self._LinkKit__config_mqtt_client_internal() + self._LinkKit__mqtt_client.connect(host=(self._LinkKit__host_name_internal), port=(self._LinkKit__mqtt_port), keepalive=(self._LinkKit__mqtt_keep_alive)) + return 0 + + def __connect_async_internal(self): + if self._LinkKit__device_secret is None or self._LinkKit__device_secret == "": + if not self._LinkKit__product_secret is None: + if self._LinkKit__product_secret == "": + raise ValueError("device Secret & product secret both empty") + rc, value = self._LinkKit__dynamic_register_device() + if self._LinkKit__on_device_dynamic_register is None: + raise Exception("user not give on_device_dynamic_register") + else: + try: + self._LinkKit__on_device_dynamic_register(rc, value, self._LinkKit__user_data) + if rc == 0: + self._LinkKit__device_secret = value + else: + self._LinkKit__link_log.error("dynamic register device fail:" + value) + return 1 + except Exception as e: + try: + self._LinkKit__link_log.error(e) + return 2 + finally: + e = None + del e + + self._LinkKit__config_mqtt_client_internal() + self._LinkKit__mqtt_client.connect_async(host=(self._LinkKit__host_name_internal), port=(self._LinkKit__mqtt_port), keepalive=(self._LinkKit__mqtt_keep_alive)) + self._LinkKit__mqtt_client.loop_start() + + def connect_async(self): + self._LinkKit__link_log.debug("connect_async") + if self._LinkKit__linkkit_state not in (LinkKit.LinkKitState.INITIALIZED, LinkKit.LinkKitState.DISCONNECTED): + raise 
LinkKit.StateError("not in INITIALIZED or DISCONNECTED state") + self._LinkKit__connect_async_req = True + with self._LinkKit__worker_loop_exit_req_lock: + self._LinkKit__worker_loop_exit_req = False + return self._LinkKit__loop_thread.start(self._LinkKit__loop_forever_internal) + + def disconnect(self): + self._LinkKit__link_log.debug("disconnect") + if self._LinkKit__linkkit_state is not LinkKit.LinkKitState.CONNECTED: + raise LinkKit.StateError("not in CONNECTED state") + self._LinkKit__linkkit_state = LinkKit.LinkKitState.DISCONNECTING + if self._LinkKit__connect_async_req: + with self._LinkKit__worker_loop_exit_req_lock: + self._LinkKit__worker_loop_exit_req = True + self._LinkKit__mqtt_client.disconnect() + self._LinkKit__loop_thread.stop() + + @staticmethod + def __check_topic_string(topic): + if len(topic) > 128 or len(topic) == 0: + raise ValueError("topic string length too long,need decrease %d bytes" % (128 - len(topic))) + + def publish_topic(self, topic, payload=None, qos=1): + if self._LinkKit__linkkit_state is not LinkKit.LinkKitState.CONNECTED: + raise LinkKit.StateError("not in CONNECTED state") + elif topic is None or len(topic) == 0: + raise ValueError("Invalid topic.") + if qos != 0 and qos != 1: + raise ValueError("Invalid qos.") + self._LinkKit__check_topic_string(topic) + rc, mid = self._LinkKit__mqtt_client.publish(topic, payload, qos) + if rc == 0: + return ( + 0, mid) + return (1, None) + + def subscribe_topicParse error at or near `COME_FROM' instruction at offset 340_0 + + def unsubscribe_topic(self, topic): + if self._LinkKit__linkkit_state is not LinkKit.LinkKitState.CONNECTED: + raise LinkKit.StateError("not in CONNECTED state") + else: + unsubscribe_topics = [] + if not topic is None: + if topic == "": + raise ValueError("Invalid topic.") + if isinstance(topic, str): + self._LinkKit__check_topic_string(topic) + if topic not in self._LinkKit__user_topics: + return (1, None) + unsubscribe_topics.append(topic) + elif isinstance(topic, 
# --- LinkKit methods: RRPC topic helpers, the worker loop and the
# --- timeout / reconnect bookkeeping it drives.  (The tail of
# --- unsubscribe_topic that opened this span belongs to the previous
# --- method.)  Decompiler `e = None; del e` cleanup noise is dropped
# --- throughout -- it is what CPython emits for `except ... as e` scoping
# --- and has no behavioral effect.

def __make_rrpc_topic(self, topic):
    """Prefix a (tidied) user topic with the cloud RRPC wildcard prefix."""
    return "/ext/rrpc/+%s" % topic

def subscribe_rrpc_topic(self, topic):
    """Subscribe the RRPC form (/ext/rrpc/+<topic>) of one or more topics.

    NOTE(review): the decompiler emitted "Parse error at or near COME_FROM"
    for this method; reconstructed by symmetry with subscribe_topic and the
    rrpc bookkeeping read by __handler_task_on_subscribe_callback and
    __try_parse_rrpc_topic (membership is by *raw* topic) -- confirm
    against the upstream SDK before shipping.
    """
    if self._LinkKit__linkkit_state is not LinkKit.LinkKitState.CONNECTED:
        raise LinkKit.StateError("not in CONNECTED state")
    if topic is None or topic == "":
        raise ValueError("Invalid topic.")
    source_topics = [topic] if isinstance(topic, str) else list(topic)
    rrpc_topics = []
    tidied = []
    for one_topic in source_topics:
        clean = self._LinkKit__tidy_topic(one_topic)
        self._LinkKit__check_topic_string(clean)
        tidied.append(clean)
        rrpc_topics.append((self._LinkKit__make_rrpc_topic(clean), 0))
    with self._LinkKit__user_rrpc_topics_subscribe_request_lock:
        if len(rrpc_topics) == 0:
            return (2, None)
        rc, mid = self._LinkKit__mqtt_client.subscribe(rrpc_topics)
        if rc == mqtt.MQTT_ERR_SUCCESS:
            self._LinkKit__user_rrpc_topics_subscribe_request[mid] = rrpc_topics
            with self._LinkKit__user_rrpc_topics_lock:
                # __try_parse_rrpc_topic checks `raw_topic in __user_rrpc_topics`.
                for clean in tidied:
                    self._LinkKit__user_rrpc_topics.append(clean)
            return (rc, mid)
        return (1, None)

def unsubscribe_rrpc_topic(self, topic):
    """Unsubscribe the RRPC form of one or more topics.

    NOTE(review): original body lost to a decompiler "Parse error at or
    near POP_BLOCK"; reconstructed to mirror subscribe_rrpc_topic /
    unsubscribe_topic -- confirm against the upstream SDK.
    """
    if self._LinkKit__linkkit_state is not LinkKit.LinkKitState.CONNECTED:
        raise LinkKit.StateError("not in CONNECTED state")
    if topic is None or topic == "":
        raise ValueError("Invalid topic.")
    source_topics = [topic] if isinstance(topic, str) else list(topic)
    rrpc_topics = []
    with self._LinkKit__user_rrpc_topics_lock:
        for one_topic in source_topics:
            clean = self._LinkKit__tidy_topic(one_topic)
            self._LinkKit__check_topic_string(clean)
            if clean in self._LinkKit__user_rrpc_topics:
                self._LinkKit__user_rrpc_topics.remove(clean)
                rrpc_topics.append(self._LinkKit__make_rrpc_topic(clean))
    with self._LinkKit__user_rrpc_topics_unsubscribe_request_lock:
        if len(rrpc_topics) == 0:
            return (2, None)
        rc, mid = self._LinkKit__mqtt_client.unsubscribe(rrpc_topics)
        if rc == mqtt.MQTT_ERR_SUCCESS:
            self._LinkKit__user_rrpc_topics_unsubscribe_request[mid] = rrpc_topics
            return (rc, mid)
        return (1, None)

def __on_internal_connect_safe(self, client, user_data, session_flag, rc):
    """Forward a (possibly synthetic) connect result to the handler task."""
    if rc == 0:
        self._LinkKit__reset_reconnect_wait()
    session_flag_internal = {"session present": session_flag}
    self._LinkKit__handler_task.post_message(self._LinkKit__handler_task_cmd_on_connect,
                                             (client, user_data, session_flag_internal, rc))

def __loop_forever_internal(self):
    """Worker-thread body: dynamic-register if needed, configure the MQTT
    client, then reconnect + run the paho network loop (with timeout
    cleanup) until __worker_loop_exit_req is raised.

    Synthetic rc codes reported through __on_internal_connect_safe:
    6 = SSL config failure, 7 = connect_async failure, 9 = socket error.
    """
    self._LinkKit__link_log.debug("enter")
    self._LinkKit__linkkit_state = LinkKit.LinkKitState.CONNECTING
    if self._LinkKit__device_secret is None or self._LinkKit__device_secret == "":
        rc, value = self._LinkKit__dynamic_register_device()
        # FIX(review): __connect_internal / __connect_async_internal refuse
        # to run without an on_device_dynamic_register callback; the
        # original here called it unconditionally and would die with a
        # TypeError when it is None.  Made consistent with those paths.
        if self._LinkKit__on_device_dynamic_register is None:
            raise Exception("user not give on_device_dynamic_register")
        try:
            self._LinkKit__on_device_dynamic_register(rc, value, self._LinkKit__user_data)
            if rc == 0:
                self._LinkKit__device_secret = value
            else:
                self._LinkKit__link_log.error("dynamic register device fail:" + value)
                self._LinkKit__linkkit_state = LinkKit.LinkKitState.INITIALIZED
                return 1
        except Exception as e:
            self._LinkKit__link_log.error(e)
            self._LinkKit__linkkit_state = LinkKit.LinkKitState.INITIALIZED
            return 2
    try:
        self._LinkKit__config_mqtt_client_internal()
    except ssl.SSLError as e:
        self._LinkKit__link_log.error("config mqtt raise exception:" + str(e))
        self._LinkKit__linkkit_state = LinkKit.LinkKitState.INITIALIZED
        self._LinkKit__on_internal_connect_safe(None, None, 0, 6)
        return
    try:
        self._LinkKit__mqtt_client.connect_async(host=self._LinkKit__host_name_internal,
                                                 port=self._LinkKit__mqtt_port,
                                                 keepalive=self._LinkKit__mqtt_keep_alive)
    except Exception as e:
        self._LinkKit__link_log.error("__loop_forever_internal connect raise exception:" + str(e))
        self._LinkKit__linkkit_state = LinkKit.LinkKitState.INITIALIZED
        self._LinkKit__on_internal_connect_safe(None, None, 0, 7)
        return
    while True:
        if self._LinkKit__worker_loop_exit_req:
            if self._LinkKit__linkkit_state == LinkKit.LinkKitState.DESTRUCTING:
                self._LinkKit__handler_task.stop()
                self._LinkKit__linkkit_state = LinkKit.LinkKitState.DESTRUCTED
            break
        try:
            self._LinkKit__linkkit_state = LinkKit.LinkKitState.CONNECTING
            self._LinkKit__mqtt_client.reconnect()
        except (socket.error, OSError) as e:
            self._LinkKit__link_log.error(e)
            if self._LinkKit__linkkit_state == LinkKit.LinkKitState.CONNECTING:
                self._LinkKit__linkkit_state = LinkKit.LinkKitState.DISCONNECTED
                self._LinkKit__on_internal_connect_safe(None, None, 0, 9)
            if self._LinkKit__linkkit_state == LinkKit.LinkKitState.DESTRUCTING:
                self._LinkKit__handler_task.stop()
                self._LinkKit__linkkit_state = LinkKit.LinkKitState.DESTRUCTED
                break
            self._LinkKit__reconnect_wait()
            continue
        # Pump the network loop until paho reports an error / disconnect.
        rc = mqtt.MQTT_ERR_SUCCESS
        while rc == mqtt.MQTT_ERR_SUCCESS:
            rc = self._LinkKit__mqtt_client.loop(self._LinkKit__mqtt_request_timeout, 1)
            self._LinkKit__clean_timeout_message()
            self._LinkKit__clean_thing_timeout_request_id()
        if self._LinkKit__linkkit_state == LinkKit.LinkKitState.CONNECTED:
            self._LinkKit__on_internal_disconnect(None, None, 1)
        self._LinkKit__link_log.info("loop return:%r" % rc)
        if self._LinkKit__worker_loop_exit_req:
            if self._LinkKit__linkkit_state == LinkKit.LinkKitState.DESTRUCTING:
                self._LinkKit__handler_task.stop()
                self._LinkKit__linkkit_state = LinkKit.LinkKitState.DESTRUCTED
            break
        self._LinkKit__reconnect_wait()

def __clean_timeout_message(self):
    """Drop queued-message bookkeeping older than the request timeout."""
    expire_timestamp = self._LinkKit__timestamp() - self._LinkKit__mqtt_request_timeout * 1000
    with self._LinkKit__thing_prop_post_mid_lock:
        self._LinkKit__clean_timeout_message_item(self._LinkKit__thing_prop_post_mid, expire_timestamp)
    with self._LinkKit__thing_event_post_mid_lock:
        self._LinkKit__clean_timeout_message_item(self._LinkKit__thing_event_post_mid, expire_timestamp)
    with self._LinkKit__thing_answer_service_mid_lock:
        self._LinkKit__clean_timeout_message_item(self._LinkKit__thing_answer_service_mid, expire_timestamp)
    with self._LinkKit__thing_raw_up_mid_lock:
        self._LinkKit__clean_timeout_message_item(self._LinkKit__thing_raw_up_mid, expire_timestamp)
    with self._LinkKit__thing_raw_down_reply_mid_lock:
        self._LinkKit__clean_timeout_message_item(self._LinkKit__thing_raw_down_reply_mid, expire_timestamp)
    with self._LinkKit__thing_prop_set_reply_mid_lock:
        self._LinkKit__clean_timeout_message_item(self._LinkKit__thing_prop_set_reply_mid, expire_timestamp)
        # NOTE(review): the decompiled rendering nested these two under the
        # previous lock; kept there pending confirmation against upstream.
        self._LinkKit__clean_timeout_message_item(self._LinkKit__thing_subscribe_sys_request_mid, expire_timestamp)
        self._LinkKit__clean_timeout_message_item(self._LinkKit__device_info_mid, expire_timestamp)

def __clean_timeout_message_item(self, mids, expire_time):
    """Pop every mid whose enqueue timestamp precedes expire_time."""
    for mid in list(mids.keys()):
        if mids[mid] < expire_time:
            timestamp = mids.pop(mid)
            self._LinkKit__link_log.error("__clean_timeout_message_item pop:%r,timestamp:%r",
                                          mid, timestamp)

def __reconnect_wait(self):
    """Exponential backoff with jitter between reconnect attempts.

    NOTE(review): decompiled indentation was ambiguous; jitter and sleep
    are assumed to apply on every call (both backoff branches).
    """
    if self._LinkKit__mqtt_auto_reconnect_sec == 0:
        self._LinkKit__mqtt_auto_reconnect_sec = self._LinkKit__mqtt_auto_reconnect_min_sec
    else:
        self._LinkKit__mqtt_auto_reconnect_sec = min(self._LinkKit__mqtt_auto_reconnect_sec * 2,
                                                     self._LinkKit__mqtt_auto_reconnect_max_sec)
    self._LinkKit__mqtt_auto_reconnect_sec += random.randint(1, self._LinkKit__mqtt_auto_reconnect_sec)
    time.sleep(self._LinkKit__mqtt_auto_reconnect_sec)
# --- LinkKit methods: TSL (thing model) setup and system-topic subscribe.

def __reset_reconnect_wait(self):
    """Reset the reconnect backoff after a successful connect."""
    self._LinkKit__mqtt_auto_reconnect_sec = 0

def start_worker_loop(self):
    """Kept for API compatibility; the worker loop starts in connect_async()."""
    pass

def thing_setup(self, file=None):
    """Load the TSL (thing specification) JSON and precompute topic tables.

    file=None switches the client to raw-passthrough-only mode.
    Returns 0 on success, 1 when already set up, 2 when the file cannot
    be read or parsed.  Only callable in INITIALIZED state.
    """
    if self._LinkKit__linkkit_state is not LinkKit.LinkKitState.INITIALIZED:
        raise LinkKit.StateError("not in INITIALIZED state")
    if self._LinkKit__thing_setup_state:
        return 1
    if file is None:
        self._LinkKit__thing_raw_only = True
        self._LinkKit__thing_setup_state = True
        return 0
    try:
        with open(file, encoding="utf-8") as f:
            tsl = json.load(f)
        # Events: the generic "post" event enumerates postable properties;
        # every other identifier is a real thing-model event.
        for event_item in tsl["events"]:
            identifier = event_item["identifier"]
            if identifier == "post":
                for output_item in event_item["outputData"]:
                    self._LinkKit__thing_properties_post.add(output_item["identifier"])
            else:
                self._LinkKit__thing_events.add(identifier)
        # Services: "set"/"get" enumerate writable/readable properties;
        # anything else is a user service with a matching *_reply topic.
        for service_item in tsl["services"]:
            identifier = service_item["identifier"]
            if identifier == "set":
                for input_item in service_item["inputData"]:
                    self._LinkKit__thing_properties_set.add(input_item["identifier"])
            elif identifier == "get":
                for output_item in service_item["outputData"]:
                    self._LinkKit__thing_properties_get.add(output_item["identifier"])
            else:
                self._LinkKit__thing_services.add(identifier)
                service_reply_topic = self._LinkKit__thing_topic_service_pattern % (
                    self._LinkKit__product_key,
                    self._LinkKit__device_name,
                    identifier + "_reply")
                self._LinkKit__thing_topic_services_reply.add(service_reply_topic)
        for event in self._LinkKit__thing_events:
            post_topic = self._LinkKit__thing_topic_event_post_pattern % (
                self._LinkKit__product_key, self._LinkKit__device_name, event)
            self._LinkKit__thing_topic_event_post[event] = post_topic
            self._LinkKit__thing_topic_event_post_reply.add(post_topic + "_reply")
        for service in self._LinkKit__thing_services:
            self._LinkKit__thing_topic_services.add(self._LinkKit__thing_topic_service_pattern % (
                self._LinkKit__product_key, self._LinkKit__device_name, service))
    except Exception as e:
        # NOTE: also reached on JSON / schema errors, not only open() failures.
        self._LinkKit__link_log.info("file open error:" + str(e))
        return 2
    self._LinkKit__thing_setup_state = True
    return 0

def __subscribe_sys_topic(self):
    """Subscribe every system topic required by the current mode.

    Returns 0 when the subscribe request was sent, 1 otherwise.
    """
    subscribe_sys_topics = [(self._LinkKit__device_info_topic_reply, 0)]
    if self._LinkKit__thing_setup_state:
        if self._LinkKit__thing_raw_only:
            thing_subscribe_topics = [
                (self._LinkKit__thing_topic_raw_down, 0),
                (self._LinkKit__thing_topic_raw_up_reply, 0)]
        else:
            thing_subscribe_topics = [
                (self._LinkKit__thing_topic_prop_set, 0),
                (self._LinkKit__thing_topic_prop_get, 0),
                (self._LinkKit__thing_topic_raw_down, 0),
                (self._LinkKit__thing_topic_prop_post_reply, 0),
                (self._LinkKit__thing_topic_raw_up_reply, 0),
                (self._LinkKit__thing_topic_update_device_info_reply, 0),
                (self._LinkKit__thing_topic_delete_device_info_reply, 0),
                (self._LinkKit__thing_topic_shadow_get, 0)]
        # Service and event-reply topic sets are empty in raw-only mode, so
        # extending here is harmless for both branches.
        for topic in self._LinkKit__thing_topic_services:
            thing_subscribe_topics.append((topic, 0))
        for topic in self._LinkKit__thing_topic_event_post_reply:
            thing_subscribe_topics.append((topic, 0))
        subscribe_sys_topics += thing_subscribe_topics
    with self._LinkKit__thing_subscribe_sys_request_lock:
        rc, mid = self._LinkKit__mqtt_client.subscribe(subscribe_sys_topics)
        if rc == mqtt.MQTT_ERR_SUCCESS:
            self._LinkKit__thing_subscribe_sys_request = True
            self._LinkKit__thing_subscribe_sys_request_mid[mid] = self._LinkKit__timestamp()
            return 0
        return 1
# --- LinkKit methods: raw up/down link, device-info (tag) maintenance and
# --- service answers.  (The tail of __subscribe_sys_topic that opened this
# --- span belongs to the previous method.)

def thing_raw_post_data(self, payload):
    """Publish raw (pass-through) uplink data.  0 on success, 1 otherwise."""
    if self._LinkKit__linkkit_state is not LinkKit.LinkKitState.CONNECTED:
        raise LinkKit.StateError("not in CONNECTED state")
    with self._LinkKit__thing_raw_up_mid_lock:
        rc, mid = self._LinkKit__mqtt_client.publish(self._LinkKit__thing_topic_raw_up, payload, 0)
        if rc == mqtt.MQTT_ERR_SUCCESS:
            self._LinkKit__thing_raw_up_mid[mid] = self._LinkKit__timestamp()
            return 0
        return 1

def thing_raw_data_reply(self, payload):
    """Publish a reply to raw downlink data.  0 on success, 1 otherwise."""
    if self._LinkKit__linkkit_state is not LinkKit.LinkKitState.CONNECTED:
        raise LinkKit.StateError("not in CONNECTED state")
    with self._LinkKit__thing_raw_down_reply_mid_lock:
        rc, mid = self._LinkKit__mqtt_client.publish(self._LinkKit__thing_topic_raw_down_reply, payload, 0)
        if rc == mqtt.MQTT_ERR_SUCCESS:
            self._LinkKit__thing_raw_down_reply_mid[mid] = self._LinkKit__timestamp()
            return 0
        return 1

def thing_update_device_info(self, payload):
    """Send a thing.deviceinfo.update request.

    FIX(review): the decompiler rendered the setup/enable guard as the
    nonsensical statement `raise A and B or StateError(...)` followed by an
    unreachable return; restored as a plain state check.
    Returns (rc, request_id) on success, (1, None) on failure.
    """
    if self._LinkKit__linkkit_state is not LinkKit.LinkKitState.CONNECTED:
        raise LinkKit.StateError("not in CONNECTED state")
    if not (self._LinkKit__thing_setup_state and self._LinkKit__thing_enable_state):
        raise LinkKit.StateError("not in SETUP & ENABLE state")
    request_id = self._LinkKit__get_thing_request_id()
    with self._LinkKit__thing_update_device_info_up_mid_lock:
        rc, mid = self._LinkKit__mqtt_client.publish(
            self._LinkKit__thing_topic_update_device_info_up,
            self._LinkKit__pack_alink_request(request_id, "thing.deviceinfo.update", payload),
            0)
        if rc == mqtt.MQTT_ERR_SUCCESS:
            self._LinkKit__thing_update_device_info_up_mid[mid] = self._LinkKit__timestamp()
            return (rc, request_id)
        return (1, None)

def thing_delete_device_info(self, payload):
    """Send a thing.deviceinfo.delete request.

    NOTE(review): the decompiled guard read `return setup and enable or 1`;
    restored as "(1, None) unless set up and enabled" to match this
    method's tuple returns -- confirm against the upstream SDK.
    """
    if self._LinkKit__linkkit_state is not LinkKit.LinkKitState.CONNECTED:
        raise LinkKit.StateError("not in CONNECTED state")
    if not (self._LinkKit__thing_setup_state and self._LinkKit__thing_enable_state):
        return (1, None)
    request_id = self._LinkKit__get_thing_request_id()
    with self._LinkKit__thing_delete_device_info_up_mid_lock:
        rc, mid = self._LinkKit__mqtt_client.publish(
            self._LinkKit__thing_topic_delete_device_info_up,
            self._LinkKit__pack_alink_request(request_id, "thing.deviceinfo.delete", payload),
            0)
        if rc == mqtt.MQTT_ERR_SUCCESS:
            self._LinkKit__thing_delete_device_info_up_mid[mid] = self._LinkKit__timestamp()
            return (rc, request_id)
        return (1, None)

def thing_update_tags(self, tagMap):
    """Update device tags; tagMap maps tag key -> tag value."""
    if not isinstance(tagMap, dict):
        raise ValueError("tagMap must be a dictionary")
    payload = [{LinkKit.TAG_KEY: k, LinkKit.TAG_VALUE: v} for k, v in tagMap.items()]
    return self.thing_update_device_info(payload)

def thing_remove_tags(self, tagKeys):
    """Remove device tags; tagKeys is a list/tuple of tag keys."""
    if not isinstance(tagKeys, (list, tuple)):
        raise ValueError("tagKeys must be a list or tuple")
    payload = [{LinkKit.TAG_KEY: key} for key in tagKeys]
    return self.thing_delete_device_info(payload)

def __pack_alink_request(self, request_id, method, params):
    """Serialize a standard ALink request envelope.

    FIX(review): the decompiled version field was '"1.0"' (quotes embedded
    in the string), which sends version='"1.0"' on the wire; every
    hand-built request elsewhere (e.g. thing_trigger_event) uses plain
    "1.0".
    """
    request = {
        "id": request_id,
        "version": "1.0",
        "params": params,
        "method": method,
    }
    return json.dumps(request)

def thing_answer_service(self, identifier, request_id, code, data=None):
    """Answer a cloud service call (normal or RRPC-wrapped).

    NOTE(review): decompiled guard structure was garbled; restored as
    "return 1 unless set up and enabled".  Returns 0 when the reply was
    queued, 1 otherwise.
    """
    if self._LinkKit__linkkit_state is not LinkKit.LinkKitState.CONNECTED:
        raise LinkKit.StateError("not in CONNECTED state")
    if not self._LinkKit__thing_setup_state or not self._LinkKit__thing_enable_state:
        return 1
    if data is None:
        data = {}
    response = {"id": request_id, "code": code, "data": data}
    item = self._LinkKit__pop_rrpc_service("alink_" + str(request_id))
    if item:
        # RRPC-wrapped call: reply on the captured per-request topic.
        service_reply_topic = item["topic"]
    else:
        service_reply_topic = self._LinkKit__thing_topic_service_pattern % (
            self._LinkKit__product_key,
            self._LinkKit__device_name,
            identifier + "_reply")
    with self._LinkKit__thing_answer_service_mid_lock:
        rc, mid = self._LinkKit__mqtt_client.publish(service_reply_topic, json.dumps(response), 0)
        if rc == mqtt.MQTT_ERR_SUCCESS:
            self._LinkKit__thing_answer_service_mid[mid] = self._LinkKit__timestamp()
            return 0
        return 1
# --- LinkKit methods: ALink request-id pool, event trigger and property
# --- post.  (The tail of thing_answer_service that opened this span
# --- belongs to the previous method.)

def __get_thing_request_id(self):
    """Allocate the next ALink request id (as str), or None when the pool
    of in-flight ids is saturated or the candidate id is still in use."""
    with self._LinkKit__thing_request_id_lock:
        self._LinkKit__thing_request_value += 1
        if self._LinkKit__thing_request_value > self._LinkKit__thing_request_id_max:
            self._LinkKit__thing_request_value = 0
        if len(self._LinkKit__thing_request_id) > self._LinkKit__mqtt_max_queued_message:
            return None
        if self._LinkKit__thing_request_value not in self._LinkKit__thing_request_id:
            self._LinkKit__thing_request_id[self._LinkKit__thing_request_value] = self._LinkKit__timestamp()
            self._LinkKit__link_log.debug("__get_thing_request_id pop:%r" % self._LinkKit__thing_request_value)
            return str(self._LinkKit__thing_request_value)
        return None

def __back_thing_request_id(self, post_id):
    """Return a request id to the pool (best effort; bad ids are logged)."""
    with self._LinkKit__thing_request_id_lock:
        try:
            self._LinkKit__thing_request_id.pop(int(post_id))
        except Exception as e:
            self._LinkKit__link_log.error("__back_thing_request_id pop:%r,%r" % (post_id, e))

def __reset_thing_request_id(self):
    """Clear the whole request-id pool."""
    with self._LinkKit__thing_request_id_lock:
        self._LinkKit__thing_request_value = 0
        self._LinkKit__thing_request_id.clear()

def __clean_thing_timeout_request_id(self):
    """Drop in-flight request ids older than the MQTT request timeout."""
    with self._LinkKit__thing_request_id_lock:
        expire_timestamp = self._LinkKit__timestamp() - self._LinkKit__mqtt_request_timeout * 1000
        for request_id in list(self._LinkKit__thing_request_id.keys()):
            if self._LinkKit__thing_request_id[request_id] < expire_timestamp:
                timestamp = self._LinkKit__thing_request_id.pop(request_id)
                self._LinkKit__link_log.error("__clean_thing_timeout_request_id pop:%r,timestamp:%r",
                                              request_id, timestamp)

def thing_trigger_event(self, event_tuple):
    """Post a thing-model event; event_tuple is (identifier, params).

    Returns (0, request_id) on success, (1, None) on failure.
    FIX(review): the original returned a bare 1 when no request id was
    available while every other failure path returns (1, None); made
    uniform.  The decompiled setup/enable guard
    (`return A and B or (1, None)`) is restored as a plain check.
    """
    if self._LinkKit__linkkit_state is not LinkKit.LinkKitState.CONNECTED:
        raise LinkKit.StateError("not in CONNECTED state")
    if not (self._LinkKit__thing_setup_state and self._LinkKit__thing_enable_state):
        return (1, None)
    if isinstance(event_tuple, tuple):
        event, params = event_tuple
    else:
        return (1, None)
    if event not in self._LinkKit__thing_topic_event_post.keys():
        return (1, None)
    request_id = self._LinkKit__get_thing_request_id()
    if request_id is None:
        return (1, None)
    request = {
        "id": request_id,
        "version": "1.0",
        "params": {"value": params},
        "method": "thing.event.%s.post" % event,
    }
    with self._LinkKit__thing_event_post_mid_lock:
        event_topic = self._LinkKit__thing_topic_event_post[event]
        self._LinkKit__link_log.debug("thing_trigger_event publish topic")
        rc, mid = self._LinkKit__mqtt_client.publish(event_topic, json.dumps(request), 0)
        self._LinkKit__link_log.debug("thing_trigger_event publish done")
        if rc == mqtt.MQTT_ERR_SUCCESS:
            self._LinkKit__thing_event_post_mid[mid] = self._LinkKit__timestamp()
            return (0, request_id)
        return (1, None)

def thing_post_property(self, property_data):
    """Post property values via thing.event.property.post.

    FIX(review): the decompiled request carried version='"1.0"' and
    method='"thing.event.property.post"' (stray embedded quotes); the wire
    format built by thing_trigger_event shows both must be unquoted.
    Returns (0, request_id) on success, (1, None) on failure.
    """
    if self._LinkKit__linkkit_state is not LinkKit.LinkKitState.CONNECTED:
        raise LinkKit.StateError("not in CONNECTED state")
    if not (self._LinkKit__thing_setup_state and self._LinkKit__thing_enable_state):
        return (1, None)
    request_id = self._LinkKit__get_thing_request_id()
    if request_id is None:
        return (1, None)
    request = {
        "id": request_id,
        "version": "1.0",
        "params": property_data,
        "method": "thing.event.property.post",
    }
    with self._LinkKit__thing_prop_post_mid_lock:
        rc, mid = self._LinkKit__mqtt_client.publish(self._LinkKit__thing_topic_prop_post,
                                                     json.dumps(request), 1)
        if rc == mqtt.MQTT_ERR_SUCCESS:
            self._LinkKit__thing_prop_post_mid[mid] = self._LinkKit__timestamp()
            return (0, request_id)
        return (1, None)
# --- LinkKit methods: inbound message dispatch (lost to decompilation),
# --- RRPC request bookkeeping and device-shadow handling.  (The tail of
# --- thing_post_property that opened this span belongs to the previous
# --- method.)

def __on_internal_async_message(self, message):
    """Dispatch one inbound MQTT message to the matching handler.

    TODO(review): uncompyle6 could not reconstruct this method ("Parse
    error at or near COME_FROM"); the original bytecode routed sys / rrpc /
    user-topic messages to the handlers visible in this file
    (__try_parse_rrpc_topic, __try_parse_try_shadow, user callbacks).
    Restore it from the upstream SDK before use -- this stub only logs and
    fails loudly instead of silently dropping traffic.
    """
    self._LinkKit__link_log.error("__on_internal_async_message lost in decompilation; topic:%r"
                                  % getattr(message, "topic", None))
    raise NotImplementedError("__on_internal_async_message must be restored from the original SDK")

def __parse_raw_topic(self, topic):
    """Strip the '/ext/rrpc/<id>' prefix, returning the embedded raw topic."""
    return re.search("/ext/rrpc/.*?(/.*)", topic).group(1)

def __tidy_topic(self, topic):
    """Normalize a user topic: strip blanks, ensure a leading '/'.
    Returns None for None/blank input."""
    if topic is None:
        return None
    topic = topic.strip()
    if len(topic) == 0:
        return None
    if topic[0] != "/":
        topic = "/" + topic
    return topic

def __push_rrpc_service(self, item):
    """Remember an in-flight RRPC request, evicting the oldest beyond cap."""
    with self._LinkKit__user_rrpc_request_ids_lock:
        if len(self._LinkKit__user_rrpc_request_ids) > self._LinkKit__user_rrpc_request_max_len:
            removed_item = self._LinkKit__user_rrpc_request_ids.pop(0)
            del self._LinkKit__user_rrpc_request_id_index_map[removed_item["id"]]
        self._LinkKit__user_rrpc_request_ids.append(item)
        self._LinkKit__user_rrpc_request_id_index_map[item["id"]] = 0

def __pop_rrpc_service(self, id):
    """Remove and return the stored RRPC request dict for id, or None."""
    with self._LinkKit__user_rrpc_request_ids_lock:
        if id not in self._LinkKit__user_rrpc_request_id_index_map:
            return None
        del self._LinkKit__user_rrpc_request_id_index_map[id]
        for index in range(len(self._LinkKit__user_rrpc_request_ids)):
            item = self._LinkKit__user_rrpc_request_ids[index]
            if item["id"] == id:
                del self._LinkKit__user_rrpc_request_ids[index]
                return item
        return None

def thing_answer_rrpc(self, id, response):
    """Reply to a pending plain-RRPC request.  (rc, mid) or (1, None)."""
    item = self._LinkKit__pop_rrpc_service("rrpc_" + id)
    if item is None:
        self._LinkKit__link_log.error("answer_rrpc_topic, the id does not exist: %s" % id)
        return (1, None)
    rc, mid = self._LinkKit__mqtt_client.publish(item["topic"], response, 0)
    self._LinkKit__link_log.debug("reply topic:%s" % item["topic"])
    return (rc, mid)

def __try_parse_rrpc_topic(self, message):
    """Handle an inbound /ext/rrpc/... message: sys-service RRPC calls go to
    on_thing_call_service, user RRPC topics go to on_topic_rrpc_message."""
    self._LinkKit__link_log.debug("receive a rrpc topic:%s" % message.topic)
    raw_topic = self._LinkKit__parse_raw_topic(message.topic)
    if raw_topic.startswith("/sys"):
        if raw_topic in self._LinkKit__thing_topic_services:
            identifier = raw_topic.split("/", 6)[6]
            payload = self._LinkKit__load_json(self._LinkKit__to_str(message.payload))
            try:
                request_id = payload["id"]
                params = payload["params"]
                item_id = "alink_" + request_id
                item = {"id": item_id, "request_id": request_id, "payload": payload,
                        "identifier": identifier, "topic": message.topic}
                self._LinkKit__push_rrpc_service(item)
                self._LinkKit__on_thing_call_service(identifier, request_id, params,
                                                     self._LinkKit__user_data)
            except Exception as e:
                self._LinkKit__link_log.error("on_thing_call_service raise exception: %s" % e)
        return
    with self._LinkKit__user_rrpc_topics_subscribe_request_lock:
        with self._LinkKit__user_rrpc_topics_lock:
            if raw_topic not in self._LinkKit__user_rrpc_topics:
                self._LinkKit__link_log.error("%s is not in the rrpc-subscribed list" % raw_topic)
                return
    # FIX(review): decompiled as the unreachable-making statement
    # `return self.__on_topic_rrpc_message or None`; restored as a guard.
    if self._LinkKit__on_topic_rrpc_message is None:
        return
    try:
        rrpc_id = message.topic.split("/", 4)[3]
        item_id = "rrpc_" + rrpc_id
        item = {"id": item_id, "payload": message.payload, "topic": message.topic}
        self._LinkKit__push_rrpc_service(item)
        self._LinkKit__on_topic_rrpc_message(rrpc_id, message.topic, message.payload,
                                             message.qos, self._LinkKit__user_data)
    except Exception as e:
        self._LinkKit__link_log.error("on_topic_rrpc_message process raise exception:%r" % e)

def __try_parse_try_shadow(self, payload):
    """Record the latest shadow document (version/timestamp/state/metadata).

    NOTE(review): branch indentation was lost in decompilation; the
    symmetric reading below ("top-level key, else look inside payload") is
    assumed -- confirm against the upstream SDK.  Parsing stays
    best-effort, but failures are now logged instead of swallowed.
    """
    try:
        self._LinkKit__latest_shadow.set_latest_recevied_time(self._LinkKit__timestamp())
        self._LinkKit__latest_shadow.set_latest_recevied_payload(payload)
        msg = self._LinkKit__load_json(payload)
        if "version" in msg:
            self._LinkKit__latest_shadow.set_version(msg["version"])
        elif "payload" in msg and "version" in msg["payload"]:
            self._LinkKit__latest_shadow.set_version(msg["payload"]["version"])
        if "timestamp" in msg:
            self._LinkKit__latest_shadow.set_timestamp(msg["timestamp"])
        elif "payload" in msg and "timestamp" in msg["payload"]:
            self._LinkKit__latest_shadow.set_timestamp(msg["payload"]["timestamp"])
        if "payload" in msg and msg["payload"]["status"] == "success":
            if "state" in msg["payload"]:
                self._LinkKit__latest_shadow.set_state(msg["payload"]["state"])
            if "metadata" in msg["payload"]:
                self._LinkKit__latest_shadow.set_metadata(msg["payload"]["metadata"])
    except Exception as e:
        self._LinkKit__link_log.debug("__try_parse_try_shadow ignore error:%r" % e)

def thing_update_shadow(self, reported, version):
    """Push reported state into the device shadow."""
    request = {"state": {"reported": reported}, "method": "update", "version": version}
    return self._LinkKit__thing_update_shadow(request)

def thing_get_shadow(self):
    """Request the current shadow document."""
    return self._LinkKit__thing_update_shadow({"method": "get"})

def local_get_latest_shadow(self):
    """Return the locally cached shadow (no network traffic)."""
    return self._LinkKit__latest_shadow

def __thing_update_shadow(self, request):
    """Publish a shadow request.  (0, mid) on success, (1, None) otherwise.

    NOTE(review): the decompiled setup/enable guard
    (`return A and B or (1, None)`) is restored as a plain check, matching
    thing_post_property.
    """
    if self._LinkKit__linkkit_state is not LinkKit.LinkKitState.CONNECTED:
        raise LinkKit.StateError("not in CONNECTED state")
    if not (self._LinkKit__thing_setup_state and self._LinkKit__thing_enable_state):
        return (1, None)
    with self._LinkKit__thing_shadow_mid_lock:
        rc, mid = self._LinkKit__mqtt_client.publish(self._LinkKit__thing_topic_shadow_update,
                                                     json.dumps(request), 1)
        if rc == mqtt.MQTT_ERR_SUCCESS:
            self._LinkKit__thing_shadow_mid[mid] = self._LinkKit__timestamp()
            return (0, mid)
        return (1, None)

def __on_internal_message(self, client, user_data, message):
    """paho on_message hook: hand the message to the handler-task thread."""
    self._LinkKit__link_log.info("__on_internal_message")
    self._LinkKit__handler_task.post_message(self._LinkKit__handler_task_cmd_on_message,
                                             (client, user_data, message))

def __handler_task_on_message_callback(self, value):
    """Handler-task side of __on_internal_message."""
    client, user_data, message = value
    self._LinkKit__on_internal_async_message(message)

def __on_internal_connect(self, client, user_data, session_flag, rc):
    """paho on_connect hook: resubscribe sys topics on success, then notify
    the handler task.

    NOTE(review): decompiled indentation was ambiguous; the notification is
    posted unconditionally so failed connects still reach the user callback
    (matching __handler_task_on_connect_callback's own rc check).
    """
    self._LinkKit__link_log.info("__on_internal_connect")
    if rc == 0:
        self._LinkKit__reset_reconnect_wait()
        self._LinkKit__subscribe_sys_topic()
    self._LinkKit__handler_task.post_message(self._LinkKit__handler_task_cmd_on_connect,
                                             (client, user_data, session_flag, rc))
# --- LinkKit methods: handler-task callbacks for connect / disconnect /
# --- publish.  (The tail of __on_internal_connect that opened this span
# --- belongs to the previous method.)

def __handler_task_on_connect_callback(self, value):
    """Handler-task side of on_connect: update state, call the user's
    on_connect; user-callback exceptions are logged, not raised."""
    client, user_data, session_flag, rc = value
    self._LinkKit__link_log.info("__on_internal_connect enter")
    self._LinkKit__link_log.debug("session:%d, return code:%d" % (session_flag["session present"], rc))
    if rc == 0:
        self._LinkKit__linkkit_state = LinkKit.LinkKitState.CONNECTED
    if self._LinkKit__on_connect is not None:
        try:
            self._LinkKit__on_connect(session_flag["session present"], rc, self._LinkKit__user_data)
        except Exception as e:
            self._LinkKit__link_log.error("on_connect process raise exception:%r" % e)

def __on_internal_disconnect(self, client, user_data, rc):
    """paho on_disconnect hook: advance the state machine, clear all
    per-session tracking tables and notify the handler task.

    The original's deeply nested else/if ladder is flattened to an
    equivalent elif chain (same transitions, same early returns).
    """
    self._LinkKit__link_log.info("__on_internal_disconnect enter")
    if self._LinkKit__linkkit_state == LinkKit.LinkKitState.DESTRUCTING:
        self._LinkKit__linkkit_state = LinkKit.LinkKitState.DESTRUCTED
    elif self._LinkKit__linkkit_state == LinkKit.LinkKitState.DISCONNECTING:
        self._LinkKit__linkkit_state = LinkKit.LinkKitState.DISCONNECTED
    elif self._LinkKit__linkkit_state == LinkKit.LinkKitState.CONNECTED:
        self._LinkKit__linkkit_state = LinkKit.LinkKitState.DISCONNECTED
    elif self._LinkKit__linkkit_state == LinkKit.LinkKitState.DISCONNECTED:
        # Already disconnected: nothing to tear down.
        self._LinkKit__link_log.error("__on_internal_disconnect enter from wrong state:%r"
                                      % self._LinkKit__linkkit_state)
        return
    else:
        self._LinkKit__link_log.error("__on_internal_disconnect enter from wrong state:%r"
                                      % self._LinkKit__linkkit_state)
        return
    self._LinkKit__user_topics.clear()
    self._LinkKit__user_topics_subscribe_request.clear()
    self._LinkKit__user_topics_unsubscribe_request.clear()
    self._LinkKit__user_rrpc_topics.clear()
    self._LinkKit__user_rrpc_topics_subscribe_request.clear()
    self._LinkKit__user_rrpc_topics_unsubscribe_request.clear()
    self._LinkKit__thing_prop_post_mid.clear()
    self._LinkKit__thing_event_post_mid.clear()
    self._LinkKit__thing_answer_service_mid.clear()
    self._LinkKit__thing_raw_down_reply_mid.clear()
    self._LinkKit__thing_raw_up_mid.clear()
    self._LinkKit__thing_shadow_mid.clear()
    self._LinkKit__device_info_mid.clear()
    self._LinkKit__thing_update_device_info_up_mid.clear()
    self._LinkKit__thing_delete_device_info_up_mid.clear()
    self._LinkKit__handler_task.post_message(self._LinkKit__handler_task_cmd_on_disconnect,
                                             (client, user_data, rc))
    if self._LinkKit__linkkit_state == LinkKit.LinkKitState.DESTRUCTED:
        self._LinkKit__handler_task.stop()

def __handler_task_on_disconnect_callback(self, value):
    """Handler-task side of on_disconnect: fire thing-disable first, then
    the user's on_disconnect; user exceptions are logged, not raised."""
    self._LinkKit__link_log.info("__handler_task_on_disconnect_callback enter")
    client, user_data, rc = value
    if self._LinkKit__thing_setup_state and self._LinkKit__thing_enable_state:
        self._LinkKit__thing_enable_state = False
        if self._LinkKit__on_thing_disable is not None:
            try:
                self._LinkKit__on_thing_disable(self._LinkKit__user_data)
            except Exception as e:
                self._LinkKit__link_log.error("on_thing_disable process raise exception:%r" % e)
    if self._LinkKit__on_disconnect is not None:
        try:
            self._LinkKit__on_disconnect(rc, self._LinkKit__user_data)
        except Exception as e:
            self._LinkKit__link_log.error("on_disconnect process raise exception:%r" % e)

def __on_internal_publish(self, client, user_data, mid):
    """paho on_publish hook: defer to the handler task."""
    self._LinkKit__handler_task.post_message(self._LinkKit__handler_task_cmd_on_publish,
                                             (client, user_data, mid))

def __handler_task_on_publish_callback(self, value):
    """Handler-task side of on_publish: retire the mid from whichever
    tracking table holds it, otherwise report it via on_publish_topic.

    The original repeated the same lock/pop/log stanza ten times; this
    table-driven loop is behaviorally identical (same order, same lock
    scope, same log strings, first match wins).
    """
    client, user_data, mid = value
    self._LinkKit__link_log.debug("__on_internal_publish message:%d" % mid)
    tracking_tables = (
        (self._LinkKit__thing_event_post_mid_lock, self._LinkKit__thing_event_post_mid,
         "__on_internal_publish event post mid removed"),
        (self._LinkKit__thing_prop_post_mid_lock, self._LinkKit__thing_prop_post_mid,
         "__on_internal_publish prop post mid removed"),
        (self._LinkKit__thing_prop_set_reply_mid_lock, self._LinkKit__thing_prop_set_reply_mid,
         "__on_internal_publish prop set reply mid removed"),
        (self._LinkKit__thing_answer_service_mid_lock, self._LinkKit__thing_answer_service_mid,
         "__thing_answer_service_mid mid removed"),
        (self._LinkKit__thing_raw_up_mid_lock, self._LinkKit__thing_raw_up_mid,
         "__thing_raw_up_mid mid removed"),
        (self._LinkKit__thing_raw_down_reply_mid_lock, self._LinkKit__thing_raw_down_reply_mid,
         "__thing_raw_down_reply_mid mid removed"),
        (self._LinkKit__device_info_mid_lock, self._LinkKit__device_info_mid,
         "__device_info_mid mid removed"),
        (self._LinkKit__thing_shadow_mid_lock, self._LinkKit__thing_shadow_mid,
         "__thing_shadow_mid mid removed"),
        (self._LinkKit__thing_update_device_info_up_mid_lock,
         self._LinkKit__thing_update_device_info_up_mid,
         "__thing_update_device_info_up_mid mid removed"),
        (self._LinkKit__thing_delete_device_info_up_mid_lock,
         self._LinkKit__thing_delete_device_info_up_mid,
         "__thing_delete_device_info_up_mid mid removed"),
    )
    for lock, table, log_text in tracking_tables:
        with lock:
            if mid in table:
                table.pop(mid)
                self._LinkKit__link_log.debug(log_text)
                return
    if self._LinkKit__on_publish_topic is not None:
        self._LinkKit__on_publish_topic(mid, self._LinkKit__user_data)
# --- LinkKit methods: subscribe/unsubscribe handler callbacks and small
# --- topic utilities.  (The tail of __handler_task_on_publish_callback
# --- that opened this span belongs to the previous method.)

def __on_internal_subscribe(self, client, user_data, mid, granted_qos):
    """paho on_subscribe hook: defer to the handler task."""
    self._LinkKit__handler_task.post_message(self._LinkKit__handler_task_cmd_on_subscribe,
                                             (client, user_data, mid, granted_qos))

def __handler_task_on_subscribe_callback(self, value):
    """Handler-task side of on_subscribe.

    Resolution order: sys-topic subscription -> rrpc subscription -> user
    subscription.  FIX(review): the user-topics branch used a bare
    acquire()/release() pair around the pop -- an exception inside would
    leak the lock forever; replaced with a `with` block of the same scope.
    """
    client, user_data, mid, granted_qos = value
    self._LinkKit__link_log.debug("__on_internal_subscribe mid:%d granted_qos:%s" % (
        mid, str(",".join("%s" % it for it in granted_qos))))
    if self._LinkKit__thing_subscribe_sys_request:
        if mid in self._LinkKit__thing_subscribe_sys_request_mid:
            self._LinkKit__thing_subscribe_sys_request_mid.pop(mid)
            self._LinkKit__thing_subscribe_sys_request = False
            if self._LinkKit__thing_setup_state:
                self._LinkKit__thing_enable_state = True
                if self._LinkKit__on_thing_enable:
                    self._LinkKit__on_thing_enable(self._LinkKit__user_data)
            return
    with self._LinkKit__user_rrpc_topics_subscribe_request_lock:
        if mid in self._LinkKit__user_rrpc_topics_subscribe_request:
            self._LinkKit__user_rrpc_topics_subscribe_request.pop(mid)
            if self._LinkKit__on_subscribe_rrpc_topic:
                try:
                    self._LinkKit__on_subscribe_rrpc_topic(mid, granted_qos, self._LinkKit__user_data)
                except Exception as err:
                    self._LinkKit__link_log.error("Caught exception in on_subscribe_topic: %s", err)
            return
    topics_requests = None
    with self._LinkKit__user_topics_request_lock:
        if mid in self._LinkKit__user_topics_subscribe_request:
            topics_requests = self._LinkKit__user_topics_subscribe_request.pop(mid)
    if topics_requests is not None:
        return_topics = []
        for index in range(len(topics_requests)):
            if granted_qos[index] < 0 or granted_qos[index] > 1:
                self._LinkKit__link_log.error("topics:%s, granted wrong:%d" % (
                    topics_requests[index], granted_qos[index]))
            else:
                # topics_requests items are (topic, qos) pairs.
                self._LinkKit__user_topics[topics_requests[index][0]] = granted_qos[index]
                return_topics.append((topics_requests[index], granted_qos[index]))
        if self._LinkKit__on_subscribe_topic is not None:
            try:
                self._LinkKit__on_subscribe_topic(mid, granted_qos, self._LinkKit__user_data)
            except Exception as err:
                self._LinkKit__link_log.error("Caught exception in on_subscribe_topic: %s", err)

def __on_internal_unsubscribe(self, client, user_data, mid):
    """paho on_unsubscribe hook: defer to the handler task."""
    self._LinkKit__handler_task.post_message(self._LinkKit__handler_task_cmd_on_unsubscribe,
                                             (client, user_data, mid))

def __handler_task_on_unsubscribe_callback(self, value):
    """Handler-task side of on_unsubscribe: rrpc topics first, then user
    topics; finally the user's on_unsubscribe_topic callback."""
    client, user_data, mid = value
    self._LinkKit__link_log.debug("__on_internal_unsubscribe mid:%d" % mid)
    unsubscribe_request = None
    with self._LinkKit__user_rrpc_topics_unsubscribe_request_lock:
        if mid in self._LinkKit__user_rrpc_topics_unsubscribe_request:
            self._LinkKit__user_rrpc_topics_unsubscribe_request.pop(mid)
            if self._LinkKit__on_unsubscribe_rrpc_topic:
                try:
                    self._LinkKit__on_unsubscribe_rrpc_topic(mid, self._LinkKit__user_data)
                except Exception as err:
                    self._LinkKit__link_log.error("Caught exception in on_unsubscribe_rrpc_topic: %s", err)
            return
    with self._LinkKit__user_topics_unsubscribe_request_lock:
        if mid in self._LinkKit__user_topics_unsubscribe_request:
            unsubscribe_request = self._LinkKit__user_topics_unsubscribe_request.pop(mid)
    if unsubscribe_request is not None:
        for t in unsubscribe_request:
            self._LinkKit__link_log.debug("__user_topics:%s" % str(self._LinkKit__user_topics))
            try:
                self._LinkKit__user_topics.pop(t)
            except Exception as e:
                # Keeps the original behavior: abort processing (and skip
                # the user callback) on the first untracked topic.
                self._LinkKit__link_log.error("__on_internal_unsubscribe e:" + str(e))
                return
    if self._LinkKit__on_unsubscribe_topic is not None:
        try:
            self._LinkKit__on_unsubscribe_topic(mid, self._LinkKit__user_data)
        except Exception as err:
            self._LinkKit__link_log.error("Caught exception in on_unsubscribe_topic: %s", err)

def dump_user_topics(self):
    """Return the live {topic: granted_qos} mapping (not a copy)."""
    return self._LinkKit__user_topics

@staticmethod
def to_user_topic(topic):
    """Strip the '/<pk>/<dn>/' prefix from a full topic, leaving the user part."""
    return topic.split("/", 3)[3]

def to_full_topic(self, topic):
    """Expand a user topic to the full '/<pk>/<dn>/user/...' form."""
    return self._LinkKit__USER_TOPIC_PREFIX % (self._LinkKit__product_key,
                                               self._LinkKit__device_name, topic)

@staticmethod
def __timestamp():
    """Current wall-clock time in milliseconds."""
    return int(time.time() * 1000)
+# Size of source mod 2**32: 361 bytes +import unittest, os, sys, time, threading +import paho.mqtt.client as mqtt +import logging, requests +from . import var, method + +def gen_product(): + url = "https://iottestproxy.aliyun-inc.com/iotApi/genericCall" + + +def gen_device(): + url = "https://iottestproxy.aliyun-inc.com/iotApi/genericCall" diff --git a/APPS_UNCOMPILED/lib/linksdktest/common/method.py b/APPS_UNCOMPILED/lib/linksdktest/common/method.py new file mode 100644 index 0000000..64670be --- /dev/null +++ b/APPS_UNCOMPILED/lib/linksdktest/common/method.py @@ -0,0 +1,7 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/linksdktest/common/method.py +# Compiled at: 2024-04-18 03:12:56 +pass diff --git a/APPS_UNCOMPILED/lib/linksdktest/common/var.py b/APPS_UNCOMPILED/lib/linksdktest/common/var.py new file mode 100644 index 0000000..c10af44 --- /dev/null +++ b/APPS_UNCOMPILED/lib/linksdktest/common/var.py @@ -0,0 +1,52 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/linksdktest/common/var.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 3558 bytes +"""**************************三元组信息*********************************""" +HOST_NAME_IOT = "cn-shanghai" +PRODUCT_KEY_IOT = "a16eO6FXRJU" +DEVICE_NAME_IOT = "sTSGIil6ANHAoftMX01W" +DEVICE_SECRET_IOT = "XVwg380smcP7De88a4PwSeblXxdWwulv" +PRODUCT_SECRET_IOT = "aFrFFJJMUNVZsJLj" +TOPIC_IOT = "/a16eO6FXRJU/sTSGIil6ANHAoftMX01W/test" +TOPIC_IOT_GET = "/a16eO6FXRJU/sTSGIil6ANHAoftMX01W/get" +HOST_NAME_LIVING = "cn-shanghai" +PRODUCT_KEY_LIVING = "a1K1YKPvdRn" +DEVICE_NAME_LIVING = "SDK-PY-TEST_02" +DEVICE_SECRET_LIVING = 
"f9KcXZqDW3Y6w2qsCxzZjinQRD8EBLYq" +PRODUCT_SECRET_LIVING = "bdUZd0qmIppC5xBc" +HOST_NAME_RAW = "cn-shanghai" +PRODUCT_KEY_RAW = "a1R3upYLB3G" +DEVICE_NAME_RAW = "SDK-PY-TEST_02" +DEVICE_SECRET_RAW = "pjkdA1wkGYTNieo7c0X560EMIr5VDFn3" +HOST_NAME_GLOBLE = "ap-northeast-1" +PRODUCT_KEY_GLOBLE = "a6ZOOANTk6N" +DEVICE_NAME_GLOBLE = "TEST_PY_SDK_01" +DEVICE_SECRET_GLOBLE = "nitC5vSwZ4T53sRWb7ruID62NAaYkAjs" +STR_LEN_NULL = "" +STR_LEN_1 = "a" +STR_LEN_4 = "aaaa" +STR_LEN_32 = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +STR_LEN_128 = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +STR_LEN_129 = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +STR_LEN_160 = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +STR_LEN_161 = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +ALIYUN_BROKER_CA_DATA = "-----BEGIN 
CERTIFICATE-----\nMIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jvb3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAwMDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxTaWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZjc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavpxy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdGsnUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJU26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N89iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0BAQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOzyj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymPAbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUadDKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbMEHMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==\n-----END CERTIFICATE-----" +ALIYUN_BROKER_CA_DATA_ERROR = "123" +EXCEPTION = Exception("without Error throw") +EXCEPTION_CALLBACK = Exception("without callback") +RESULT_SUCCESS = 0 +RESULT_FAIL = 1 +LOG_FORMAT = {"format": "%(asctime)s-%(process)d-%(thread)d - %(name)s:%(module)s:%(funcName)s - %(levelname)s - %(message)s"} +TSL_EVENT = "Error" +TSL_PATH = "D:/PyCharm Community Edition 2018.1.3/testcase/common/tsl.json" +TSL_PATH_ERROR_PATH = "error_path" +TSL_PATH_ERROR_FORMAT = "D:/PyCharm Community Edition 2018.1.3/testcase/common/tsl_error.json" +LOOP_TIMES_10 = 10 +LOOP_TIMES_100 = 100 +LOOP_TIMES_1000 = 1000 +SLEEP_TIME_01 = 0.1 +THREAD_COUNT_2 = 2 diff --git a/APPS_UNCOMPILED/lib/mcprotocol/__init__.py b/APPS_UNCOMPILED/lib/mcprotocol/__init__.py new file mode 100644 index 
DEVICE_READ = 48   # '0' command: read device memory
DEVICE_WRITE = 49  # '1' command: write device memory
FORCE_ON = 55      # '7' command: force one bit ON
FORCE_OFF = 56     # '8' command: force one bit OFF
ENQ = 5
STX = 2
ETX = 3
ACK = 6
NAK = 21


class ComFx:
    """Mitsubishi FX-series "computer link" (serial) protocol client.

    Frames are ASCII-hex: STX + command + 4-char address + 2-char size +
    payload + ETX + 2-char sum check.  `channel` is any transport exposing
    `_send(bytes)` / `_recv() -> bytes` (e.g. SerialModel).
    """

    def __init__(self, channel):
        self.channel = channel
        self._wordsize = 4  # one PLC word = 4 ASCII hex chars on the wire

    def _get_sum_check_code(self, calc_data):
        """Arithmetic sum of all bytes; callers truncate to one byte via _encode_value."""
        res = 0
        for byte in calc_data:
            res += byte
        return res

    def _encode_value(self, value, mode='short', isSigned=False):
        """Encode an int as upper-case ASCII-hex bytes.

        mode: "byte" -> 2 chars, "short" -> 4 chars, "long" -> 8 chars.
        Raises ValueError for an unknown mode or an out-of-range signed value.
        """
        if mode == "byte":
            width, mask = 1, 255
        elif mode == "short":
            width, mask = 2, 65535
        elif mode == "long":
            # BUG FIX: was `4294967295L` -- a Python 2 long literal, which is
            # a SyntaxError on Python 3.
            width, mask = 4, 4294967295
        else:
            raise ValueError("Please input value type")
        value = value & mask
        try:
            # range check only; raises OverflowError when a signed value does
            # not fit the requested width
            value.to_bytes(width, "little", signed=isSigned)
        except Exception:
            raise ValueError("Exceeeded Device value range")
        return format(value, "x").rjust(width * 2, "0").upper().encode()

    def _decode_value(self, byte, isSigned=False):
        """Decode ASCII-hex bytes to an int (16-bit two's complement if isSigned)."""
        try:
            value = int(byte.decode(), 16)
        except Exception:
            raise ValueError("Could not decode byte to value")
        if isSigned:
            bit = 16
            # BUG FIX: the decompiled code tested/subtracted on `byte` (a
            # bytes object) instead of `value`, so every signed decode raised.
            if value & (1 << (bit - 1)) != 0:
                value -= 1 << bit
        return value

    def _check_cmdanswer(self, recv_data):
        """Return an error string for a bad reply, else None.

        NOTE(review): an ACK-only reply (write acknowledgement) also maps to
        "No response data."; batchwrite_* treat exactly that string as
        success.  Preserved as-is -- confirm before changing.
        """
        if recv_data == b'' or recv_data[0] == ACK:
            return "No response data."
        if recv_data[0] == NAK:
            return "Illegal request data."
        return None

    def _x_or_y_offset(self, devicenum):
        # X/Y: one address byte per group of 10 points
        # (device number parsed as decimal -- TODO confirm octal handling)
        return int(devicenum / 10)

    def _s_or_m_offset(self, devicenum):
        # S/M bits are packed 8 per byte
        return int(devicenum / 8)

    def _get_read_address(self, devicecode, devicenum):
        """Map device code + number to the FX data-memory byte address.

        NOTE(review): falls through to None for unknown codes, which then
        fails inside "{:x}".format() in the caller -- TODO raise ValueError.
        """
        if devicecode == "Y":
            return self._x_or_y_offset(int(devicenum)) + 160
        if devicecode == "X":
            return self._x_or_y_offset(int(devicenum)) + 128
        if devicecode == "M":
            return self._s_or_m_offset(int(devicenum)) + 256
        if devicecode == "S":
            return self._s_or_m_offset(int(devicenum))
        if devicecode == "D":
            return int(devicenum) * 2 + 4096
        if devicecode.startswith("CN"):
            return int(devicenum) * 2 + 2560
        if devicecode.startswith("TN"):
            return int(devicenum) * 2 + 2048

    def _build_read_request(self, headdevice, readsize):
        """Frame a DEVICE_READ request for `readsize` bytes at `headdevice`."""
        devicecode = re.search("\\D+", headdevice).group(0)
        devicenum = re.search("\\d.*", headdevice).group(0)
        address = "{:x}".format(self._get_read_address(devicecode, devicenum))
        request_data = bytes()
        request_data += STX.to_bytes(1, "big")
        request_data += DEVICE_READ.to_bytes(1, "big")
        request_data += address.rjust(4, "0").upper().encode()
        request_data += format(readsize & 65535, "x").rjust(2, "0").upper().encode()
        request_data += ETX.to_bytes(1, "big")
        # sum check covers everything after STX
        # (was `request_data[1[:None]]` -- decompiler artifact for [1:])
        request_data += self._encode_value(
            self._get_sum_check_code(request_data[1:]), "byte")
        return request_data

    def _device_read(self, headdevice, readsize):
        """Read `readsize` bytes; returns (ascii_hex_payload, error)."""
        self.channel._send(self._build_read_request(headdevice, readsize))
        recv_data = self.channel._recv()
        check_res = self._check_cmdanswer(recv_data)
        if check_res:
            return ([], check_res)
        # strip STX; payload carries 2 hex chars per byte
        return (recv_data[1:readsize * 2 + 1], None)

    def batchread_wordunits(self, headdevice, readsize):
        """Batch read `readsize` words; returns ([word, ...], error)."""
        recv_data, error = self._device_read(headdevice, readsize * 2)
        if error:
            return (recv_data, error)
        word_values = []
        data_index = 0
        for _ in range(readsize):
            wordvalue = self._decode_value(
                recv_data[data_index:data_index + self._wordsize])
            # wire order is low byte first: swap the two bytes
            word_values.append(wordvalue >> 8 | (wordvalue & 255) << 8)
            data_index += self._wordsize
        return (word_values, None)

    def is_octal_device(self, headdevice):
        """True for X/Y devices (octal-numbered I/O points)."""
        return re.search("\\D+", headdevice).group(0) in ('X', 'Y')

    def _read_bit_device(self, headdevice, readsize):
        """Bit reads use the same framing as word reads; kept for API parity."""
        return self._device_read(headdevice, readsize)

    def batchread_bitunits(self, headdevice, readsize):
        """Batch read `readsize` bit values; returns ([0/1, ...], error)."""
        bit = 8 if self.is_octal_device(headdevice) else 16
        # BUG FIX: was `elif readsize % bit == 0`, which left read_word_size
        # unbound (UnboundLocalError) whenever the device was octal (X/Y).
        if readsize % bit == 0:
            read_word_size = readsize // bit
        else:
            read_word_size = readsize // bit + 1
        _recv, error = self._read_bit_device(headdevice, readsize * 2)
        if error:
            return (_recv, error)
        words = []
        data_index = 0
        for _ in range(readsize):
            wordvalue = self._decode_value(
                _recv[data_index:data_index + self._wordsize])
            words.append(wordvalue >> 8 | (wordvalue & 255) << 8)
            data_index += self._wordsize
        bit_values = []
        for i in range(read_word_size):
            word_bits = [int(x) for x in bin(words[i])[2:].rjust(bit, "0")]
            word_bits.reverse()  # LSB first
            bit_values += word_bits
        sidx = int(re.search("\\d.*", headdevice).group(0))
        return (bit_values[sidx:sidx + readsize], None)

    def _device_write(self, headdevice, values):
        """Write the byte list `values` at `headdevice`; (None, None) on success."""
        devicecode = re.search("\\D+", headdevice).group(0)
        devicenum = re.search("\\d.*", headdevice).group(0)
        address = "{:x}".format(self._get_read_address(devicecode, devicenum))
        request_data = bytes()
        request_data += STX.to_bytes(1, "big")
        request_data += DEVICE_WRITE.to_bytes(1, "big")
        request_data += address.rjust(4, "0").upper().encode()
        request_data += format(len(values), "x").rjust(2, "0").upper().encode()
        for value in values:
            request_data += format(value, "x").rjust(2, "0").upper().encode()
        request_data += ETX.to_bytes(1, "big")
        request_data += self._encode_value(
            self._get_sum_check_code(request_data[1:]), "byte")
        self.channel._send(request_data)
        recv_data = self.channel._recv()
        if recv_data != b'':
            check_res = self._check_cmdanswer(recv_data)
            if check_res:
                return ([], check_res)
        return (None, None)

    def batchwrite_wordunits(self, headdevice, values):
        """Batch write word values (sent low byte first)."""
        write_bytes = []
        for value in values:
            value = value & 65535
            write_bytes.append(value & 255)
            write_bytes.append(value >> 8)
        # BUG FIX: removed leftover debug print of the staged byte list
        recv_data, error = self._device_write(headdevice, write_bytes)
        if error and error != "No response data.":
            # an ACK-only reply surfaces as "No response data." and means success
            return (recv_data, error)
        return (None, None)

    def _get_write_bit_addr(self, headdevice, bit):
        """Address word for a force-ON/OFF command.

        NOTE(review): `bit` is accepted but unused, and every device number
        (even M/S) is parsed base-8 -- kept as decompiled, TODO verify.
        """
        devicecode = re.search("\\D+", headdevice).group(0)
        devicenum = re.search("\\d.*", headdevice).group(0)
        if devicecode == "Y":
            return int(devicenum, 8) << 8 | 5
        if devicecode == "X":
            return int(devicenum, 8) << 8 | 4
        if devicecode == "M":
            return int(devicenum, 8) << 8 | 8
        if devicecode == "S":
            return int(devicenum, 8) << 8

    def _write_bit_device(self, cmd, addr):
        """Send a single force-ON/OFF command for bit address `addr`."""
        request_data = bytes()
        request_data += STX.to_bytes(1, "big")
        request_data += cmd.to_bytes(1, "big")
        request_data += "{:x}".format(addr).rjust(4, "0").upper().encode()
        request_data += ETX.to_bytes(1, "big")
        request_data += self._encode_value(
            self._get_sum_check_code(request_data[1:]), "byte")
        self.channel._send(request_data)
        recv_data = self.channel._recv()
        if recv_data != b'':
            check_res = self._check_cmdanswer(recv_data)
            if check_res:
                return ([], check_res)
        return (None, None)

    def batchwrite_bitunits(self, headdevice, values):
        """Force each bit in `values` (0=OFF, 1=ON), one command per bit."""
        for index, value in enumerate(values):
            if value not in (0, 1):
                raise ValueError("Each value must be 0 or 1. 0 is OFF, 1 is ON.")
            write_cmd = FORCE_ON if value == 1 else FORCE_OFF
            addess = self._get_write_bit_addr(headdevice, index)
            # BUG FIX(review): the decompiled body issued the write only once,
            # after the loop (last bit only); one command per bit matches the
            # frame format -- TODO confirm against hardware
            recv_data, error = self._write_bit_device(write_cmd, addess)
            if error and error != "No response data.":
                return (recv_data, error)
        return (None, None)

    def get_plc_status(self):
        """ENQ poll; True when the PLC answers ACK, else False."""
        self.channel._send(ENQ.to_bytes(1, "big"))
        recv_data = self.channel._recv()
        return recv_data != b'' and recv_data[0] == ACK
"""This file defines mcprotocol constant.
"""
Q_SERIES = "Q"
L_SERIES = "L"
QnA_SERIES = "QnA"
iQL_SERIES = "iQ-L"
iQR_SERIES = "iQ-R"
BATCHREAD_WORDUNITS = 1
BATCHREAD_BITUNITS = 2
BATCHWRITE_WORDUNITS = 3
BATCHWRITE_BITUNITS = 4
RANDOMREAD = 5
RANDOMWRITE = 6
RANDOMWRITE_BITUNITS = 7
RANDOMWRITE_WORDUNITS = 8
COMMTYPE_BINARY = "binary"
COMMTYPE_ASCII = "ascii"
FORMAT_ONE = 1
FORMAT_TWO = 2
FORMAT_THREE = 3
FORMAT_FOUR = 4
FORMAT_FIVE = 5
FOUR_C_FRAME = 17976
THREE_C_FRAME = 17977
TWO_C_FRAME = 17986
ONE_C_FRAME = None
CTRL_CODE_STX = 2
CTRL_CODE_ETX = 3
CTRL_CODE_EOT = 4
CTRL_CODE_ENQ = 5
CTRL_CODE_ACK = 6
CTRL_CODE_LF = 10
CTRL_CODE_CL = 12
CTRL_CODE_CR = 13
CTRL_CODE_NAK = 21


class DeviceCodeError(Exception):
    """Raised for a device name that the target PLC series does not support."""

    def __init__(self, plctype, devicename):
        self.plctype = plctype
        self.devicename = devicename

    def __str__(self):
        error_txt = 'devicename: {} is not support {} series PLC.\nIf you enter hexadecimal device(X, Y, B, W, SB, SW, DX, DY, ZR) with only alphabet number\n(such as XFFF, device name is "X", device number is "FFF"),\nplease insert 0 between device name and device number.\neg: XFFF → X0FFF'.format(self.devicename, self.plctype)
        return error_txt


class DeviceConstants:
    """MC-protocol device constants, rewritten as lookup tables.

    Behavior matches the original if/elif chains, including the gap noted on
    LSTC below.
    """
    SM_DEVICE = 145
    SD_DEVICE = 169
    X_DEVICE = 156
    Y_DEVICE = 157
    M_DEVICE = 144
    L_DEVICE = 146
    F_DEVICE = 147
    V_DEVICE = 148
    B_DEVICE = 160
    D_DEVICE = 168
    W_DEVICE = 180
    TS_DEVICE = 193
    TC_DEVICE = 192
    TN_DEVICE = 194
    SS_DEVICE = 199
    SC_DEVICE = 198
    SN_DEVICE = 200
    CS_DEVICE = 196
    CC_DEVICE = 195
    CN_DEVICE = 197
    SB_DEVICE = 161
    SW_DEVICE = 181
    DX_DEVICE = 162
    DY_DEVICE = 163
    R_DEVICE = 175
    ZR_DEVICE = 176
    LTS_DEVICE = 81
    LTC_DEVICE = 80
    LTN_DEVICE = 82
    LSTS_DEVICE = 89
    LSTC_DEVICE = 88
    LSTN_DEVICE = 90
    LCS_DEVICE = 85
    LCC_DEVICE = 84
    LCN_DEVICE = 86
    LZ_DEVICE = 98
    RD_DEVICE = 44
    BIT_DEVICE = "bit"
    WORD_DEVICE = "word"
    DWORD_DEVICE = "dword"

    # name -> (binary device code, number base), common to all series.
    # In the ASCII tables only the number base is used.
    # NOTE(review): "LSTC"/LSTC_DEVICE is defined above but was never handled
    # by the original lookup chains; that gap is preserved deliberately.
    _COMMON_BINARY = {
        "SM": (SM_DEVICE, 10), "SD": (SD_DEVICE, 10),
        "X": (X_DEVICE, 16), "Y": (Y_DEVICE, 16),
        "M": (M_DEVICE, 10), "L": (L_DEVICE, 10),
        "F": (F_DEVICE, 10), "V": (V_DEVICE, 10),
        "B": (B_DEVICE, 16), "D": (D_DEVICE, 10), "W": (W_DEVICE, 16),
        "TS": (TS_DEVICE, 10), "TC": (TC_DEVICE, 10), "TN": (TN_DEVICE, 10),
        "STS": (SS_DEVICE, 10), "STC": (SC_DEVICE, 10), "STN": (SN_DEVICE, 10),
        "CS": (CS_DEVICE, 10), "CC": (CC_DEVICE, 10), "CN": (CN_DEVICE, 10),
        "SB": (SB_DEVICE, 16), "SW": (SW_DEVICE, 16),
        "DX": (DX_DEVICE, 16), "DY": (DY_DEVICE, 16),
        "R": (R_DEVICE, 10), "ZR": (ZR_DEVICE, 16),
    }
    # devices only valid on the iQ-R series
    _IQR_BINARY = {
        "LTS": (LTS_DEVICE, 10), "LTC": (LTC_DEVICE, 10), "LTN": (LTN_DEVICE, 10),
        "LSTS": (LSTS_DEVICE, 10), "LSTN": (LSTN_DEVICE, 10),
        "LCS": (LCS_DEVICE, 10), "LCC": (LCC_DEVICE, 10), "LCN": (LCN_DEVICE, 10),
        "LZ": (LZ_DEVICE, 10), "RD": (RD_DEVICE, 10),
    }
    # original STS/STC/STN naming on non-iQ-R series
    _NON_IQR_ALIAS = {"STS": "SS", "STC": "SC", "STN": "SN"}
    _COMMON_TYPES = {
        "SM": BIT_DEVICE, "SD": WORD_DEVICE, "X": BIT_DEVICE, "Y": BIT_DEVICE,
        "M": BIT_DEVICE, "L": BIT_DEVICE, "F": BIT_DEVICE, "V": BIT_DEVICE,
        "B": BIT_DEVICE, "D": WORD_DEVICE, "W": WORD_DEVICE,
        "TS": BIT_DEVICE, "TC": BIT_DEVICE, "TN": WORD_DEVICE,
        "STS": BIT_DEVICE, "STC": BIT_DEVICE, "STN": WORD_DEVICE,
        "CS": BIT_DEVICE, "CC": BIT_DEVICE, "CN": WORD_DEVICE,
        "SB": BIT_DEVICE, "SW": WORD_DEVICE,
        "DX": BIT_DEVICE, "DY": BIT_DEVICE,
        "R": WORD_DEVICE, "ZR": WORD_DEVICE,
    }
    _IQR_TYPES = {
        "LTS": BIT_DEVICE, "LTC": BIT_DEVICE, "LTN": BIT_DEVICE,
        "LSTS": BIT_DEVICE, "LSTN": DWORD_DEVICE,
        "LCS": BIT_DEVICE, "LCC": BIT_DEVICE, "LCN": DWORD_DEVICE,
        "LZ": DWORD_DEVICE, "RD": WORD_DEVICE,
    }

    def __init__(self):
        """Constructor"""
        pass

    @staticmethod
    def get_binary_devicecode(plctype, devicename):
        """Return (binary device code, number base) for `devicename`."""
        if devicename in DeviceConstants._COMMON_BINARY:
            return DeviceConstants._COMMON_BINARY[devicename]
        if plctype == iQR_SERIES and devicename in DeviceConstants._IQR_BINARY:
            return DeviceConstants._IQR_BINARY[devicename]
        raise DeviceCodeError(plctype, devicename)

    @staticmethod
    def get_ascii_devicecode(plctype, devicename):
        """Return (padded ASCII device code, number base) for `devicename`."""
        padding = 4 if plctype == iQR_SERIES else 2
        if devicename in DeviceConstants._COMMON_BINARY:
            base = DeviceConstants._COMMON_BINARY[devicename][1]
            if plctype != iQR_SERIES and devicename in DeviceConstants._NON_IQR_ALIAS:
                # non-iQ-R series use the short retentive-timer names
                return (DeviceConstants._NON_IQR_ALIAS[devicename].ljust(padding, "*"), base)
            return (devicename.ljust(padding, "*"), base)
        if plctype == iQR_SERIES and devicename in DeviceConstants._IQR_BINARY:
            return (devicename.ljust(padding, "*"), DeviceConstants._IQR_BINARY[devicename][1])
        raise DeviceCodeError(plctype, devicename)

    @staticmethod
    def get_devicetype(plctype, devicename):
        """Return "bit", "word" or "dword" for `devicename`."""
        if devicename in DeviceConstants._COMMON_TYPES:
            return DeviceConstants._COMMON_TYPES[devicename]
        if plctype == iQR_SERIES and devicename in DeviceConstants._IQR_TYPES:
            return DeviceConstants._IQR_TYPES[devicename]
        raise DeviceCodeError(plctype, devicename)


class OneCDeviceConstants:
    """Device constants for the 1C-frame (FX dedicated) protocol."""
    X_DEVICE = 88
    Y_DEVICE = 89
    M_DEVICE = 77
    S_DEVICE = 83
    TS_DEVICE = 21587
    TN_DEVICE = 21582
    CS_DEVICE = 17235
    CN_DEVICE = 17230
    D_DEVICE = 68
    R_DEVICE = 82
    BIT_DEVICE = "bit"
    WORD_DEVICE = "word"
    DWORD_DEVICE = "dword"

    # name -> (device code, number base)
    _CODES = {
        "X": (X_DEVICE, 16), "Y": (Y_DEVICE, 16),
        "M": (M_DEVICE, 10), "S": (S_DEVICE, 10),
        "TS": (TS_DEVICE, 10), "TN": (TN_DEVICE, 10),
        "CS": (CS_DEVICE, 10), "CN": (CN_DEVICE, 10),
        "D": (D_DEVICE, 10), "R": (R_DEVICE, 10),
    }
    _TYPES = {
        "X": BIT_DEVICE, "Y": BIT_DEVICE, "M": BIT_DEVICE, "S": BIT_DEVICE,
        "TS": BIT_DEVICE, "TN": WORD_DEVICE, "CS": BIT_DEVICE,
        "CN": WORD_DEVICE, "D": WORD_DEVICE, "R": WORD_DEVICE,
    }

    @staticmethod
    def get_binary_devicecode(plctype, devicename):
        """Return (binary device code, number base) for `devicename`."""
        if devicename in OneCDeviceConstants._CODES:
            return OneCDeviceConstants._CODES[devicename]
        raise DeviceCodeError(plctype, devicename)

    @staticmethod
    def get_ascii_devicecode(plctype, devicename):
        """Return (padded ASCII device code, number base) for `devicename`."""
        padding = 4 if plctype == iQR_SERIES else 1
        if devicename in OneCDeviceConstants._CODES:
            return (devicename.ljust(padding, "*"),
                    OneCDeviceConstants._CODES[devicename][1])
        raise DeviceCodeError(plctype, devicename)

    @staticmethod
    def get_devicetype(plctype, devicename):
        """Return "bit" or "word" for `devicename` (values shared with DeviceConstants)."""
        if devicename in OneCDeviceConstants._TYPES:
            return OneCDeviceConstants._TYPES[devicename]
        raise DeviceCodeError(plctype, devicename)
"""This file is collection of mcprotocol error.
"""


class MCProtocolError(Exception):
    """Raised when the PLC answers an MC-protocol command with a non-zero
    completion code."""

    def __init__(self, errorcode):
        # Render as 0x-prefixed, zero-padded, upper-case hex, e.g. 0x001F.
        self.errorcode = "0x" + format(errorcode, "x").rjust(4, "0").upper()

    def __str__(self):
        return "mc protocol error: error code {}".format(self.errorcode)


def check_mcprotocol_error(status):
    """Raise MCProtocolError unless the completion status is 0 (success)."""
    if status != 0:
        raise MCProtocolError(status)
logger = logging.getLogger("in.mcprotocol")
logger.setLevel(logging.DEBUG)

def twos_comp(val: int, mode: str='short'):
    """compute the 2's complement of int value val

    Args:
        val(int): unsigned value to reinterpret.
        mode(str): "byte" (8 bit), "short" (16 bit) or "long" (32 bit).

    Raises:
        ValueError: on an unknown mode.
    """
    widths = {"byte": 8, "short": 16, "long": 32}
    try:
        bit = widths[mode]
    except KeyError:
        raise ValueError("cannot calculate 2's complement")
    # if the sign bit is set, fold the value into the negative range
    if val & (1 << (bit - 1)) != 0:
        val -= 1 << bit
    return val


class SerialModel:
    """MC protocol transport over a pyserial ``Serial`` object."""

    def __init__(self, serial, interchar_multiplier=1.5, interframe_multiplier=3.5, t0=None):
        """Constructor.

        Args:
            serial: pyserial Serial instance (opened or not).
            interchar_multiplier: inter-byte timeout in units of t0.
            interframe_multiplier: frame timeout in units of t0.
            t0: one-character transmission time in seconds; derived from
                the baudrate when not supplied.
        """
        self._serial = serial
        # fixed: was never initialized, so _send()/_recv() before open()
        # raised AttributeError instead of a clear error
        self._is_connected = False
        if t0:
            self._t0 = t0
        elif self._serial.baudrate <= 19200:
            # 11 bits per transmitted character (start/data/parity/stop)
            self._t0 = 11.0 / self._serial.baudrate
        else:
            self._t0 = 0.0005
        self._serial.inter_byte_timeout = interchar_multiplier * self._t0
        self.set_timeout(interframe_multiplier * self._t0)

    def _do_open(self):
        """Open the given serial port if not already opened"""
        if not self._serial.is_open:
            self._serial.open()

    def _do_close(self):
        """Close the serial port if still opened"""
        if self._serial.is_open:
            self._serial.close()
        return True

    def set_timeout(self, timeout_in_sec, use_sw_timeout=False):
        """Change the timeout value"""
        self._serial.timeout = timeout_in_sec
        self.use_sw_timeout = use_sw_timeout

    def open(self):
        """Connect to PLC"""
        self._do_open()
        self._is_connected = True

    def close(self):
        """Close connection"""
        self._do_close()
        self._is_connected = False

    def _send(self, send_data):
        """send mc protocol data

        Raises:
            Exception: when the port was not opened with open().
        """
        logger.debug("_send: %s" % send_data)
        if not self._is_connected:
            raise Exception("serial is not connected")
        self._serial.reset_input_buffer()
        self._serial.reset_output_buffer()
        self._serial.write(send_data)
        self._serial.flush()

    def _recv(self, expected_length=-1):
        """receive mc protocol data

        Reads until expected_length bytes arrived, the port timeout
        expired, or (when use_sw_timeout) the software deadline passed.
        """
        response = bytearray()
        start_time = time.time() if self.use_sw_timeout else 0
        while True:
            read_bytes = self._serial.read(expected_length if expected_length > 0 else 1)
            read_duration = time.time() - start_time if self.use_sw_timeout else 0
            if not read_bytes or read_duration > self._serial.timeout:
                break
            response += read_bytes
            if expected_length >= 0 and len(response) >= expected_length:
                break

        logger.debug("_recv: %s" % response)
        return response


class SockteModel:
    """MC protocol transport over a TCP socket.

    NOTE(review): class name keeps the historical misspelling because
    callers reference it; renaming would break the interface.
    """
    _SOCKBUFSIZE = 4096

    def __init__(self, ip, port):
        self._ip = ip
        self._port = port
        self._sock = None
        self._timeout = 4       # seconds; applied to the socket on open
        self._is_connected = False

    def _do_open(self):
        """Connect to the PLC"""
        if self._sock:
            self._sock.close()
        self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.set_timeout(self._timeout)
        self._sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self._sock.connect((self._ip, self._port))

    def _do_close(self):
        """Close the connection with the PLC"""
        if self._sock:
            self._sock.close()
            self._sock = None
        return True

    def set_timeout(self, timeout_in_sec):
        """Change the timeout value"""
        self._timeout = timeout_in_sec
        if self._sock:
            self._sock.setblocking(timeout_in_sec > 0)
            if timeout_in_sec:
                self._sock.settimeout(timeout_in_sec)

    def connect(self):
        """Connect to PLC"""
        self._do_open()
        self._is_connected = True

    def close(self):
        """Close connection"""
        self._do_close()
        self._is_connected = False

    def _send(self, send_data):
        """send mc protocol data

        Raises:
            Exception: when connect() was not called first.
        """
        if not self._is_connected:
            raise Exception("socket is not connected")
        logger.info("_send: %s" % send_data)
        self._sock.settimeout(self._timeout + 1)
        # fixed: send() may transmit only part of the frame; sendall()
        # guarantees the whole frame goes out or raises
        self._sock.sendall(send_data)

    def _recv(self):
        """receive mc protocol data"""
        recv_data = self._sock.recv(self._SOCKBUFSIZE)
        logger.info("_recv: %s" % recv_data)
        return recv_data


class PkgFormat:
    """Builds and parses ASCII MC-protocol frames (1C/3C/4C style)."""

    def __init__(self, frame_id=None, is_sum_check=False):
        self.frame_id = frame_id          # const.*_C_FRAME marker, or None
        self.is_sum_check = is_sum_check  # append 2-char sum check when True

    def _encode_value(self, value, mode='short', isSigned=False):
        """encode mc protocol value data to ASCII-hex bytes.

        Args:
            value(int): value to encode (masked to the mode's width).
            mode(str): "byte", "short" or "long".
            isSigned(bool): range-check the value as signed.

        Raises:
            ValueError: unknown mode, or value outside the device range.
        """
        if mode == "byte":
            mask, size, width = 0xFF, 1, 2
        elif mode == "short":
            mask, size, width = 0xFFFF, 2, 4
        elif mode == "long":
            # fixed: was the Python 2 literal 4294967295L (py3 SyntaxError)
            mask, size, width = 0xFFFFFFFF, 4, 8
        else:
            raise ValueError("Please input value type")
        try:
            value = value & mask
            # range check only; the wire format below is ASCII hex
            value.to_bytes(size, "little", signed=isSigned)
        except (OverflowError, TypeError):
            raise ValueError("Exceeded Device value range")
        return format(value, "x").rjust(width, "0").upper().encode()

    def _decode_value(self, byte, mode='short', isSigned=False):
        """decode ASCII-hex bytes to an int value

        Raises:
            ValueError: when the bytes are not valid hexadecimal text.
        """
        try:
            value = int(byte.decode(), 16)
        except (ValueError, AttributeError, UnicodeDecodeError):
            raise ValueError("Could not decode byte to value")
        if isSigned:
            value = twos_comp(value, mode)
        return value

    def _get_sum_check_code(self, calc_data):
        """Get sum check code: arithmetic sum of all byte values."""
        return sum(calc_data)

    def _make_senddata(self, accessroute, requestdata):
        """Assemble a send frame: ENQ + [frame id] + route + request [+ sum].
        """
        mc_data = bytes()
        mc_data += const.CTRL_CODE_ENQ.to_bytes(1, "big")
        if self.frame_id is not None:
            mc_data += self.frame_id.to_bytes(2, "big")
        mc_data += accessroute
        mc_data += requestdata
        if self.is_sum_check:
            # the sum check covers everything after the ENQ control code
            # fixed: slice was decompiler-mangled to mc_data[1[:None]];
            # also dropped a leftover debug print of the checksum
            mc_data += self._encode_value(self._get_sum_check_code(mc_data[1:]), "byte")
        return mc_data

    def _get_answerstatus_index(self):
        """Get command status index from return data byte.
        """
        if self.frame_id == const.ONE_C_FRAME:
            return 6
        if self.frame_id == const.THREE_C_FRAME:
            return 11
        if self.frame_id == const.FOUR_C_FRAME:
            return 17
        return 0

    def _get_answerdata_index(self):
        """Get answer data index from return data byte.
        """
        if self.frame_id == const.ONE_C_FRAME:
            return 5
        if self.frame_id == const.THREE_C_FRAME:
            return 11
        if self.frame_id == const.FOUR_C_FRAME:
            return 17
        return 0

    def _check_cmdanswer(self, recv_data):
        """check command answer. Returns None on ACK/STX, otherwise a
        human-readable error description string.
        """
        if recv_data == b'':
            return "No response data"
        if recv_data[0] == const.CTRL_CODE_ACK or recv_data[0] == const.CTRL_CODE_STX:
            return
        if recv_data[0] == const.CTRL_CODE_NAK:
            idx = self._get_answerstatus_index()
            # fixed: slice was decompiler-mangled to recv_data[idx[:idx + 4]]
            error_code = recv_data[idx:idx + 4]
            return "Error code: %s" % error_code
        return "Unknown response"


class PkgFormat4(PkgFormat):
    """Format-4 frame: identical to PkgFormat but CR+LF terminated.

    The decompiled original duplicated __init__ and the index/answer
    helpers byte-identical to the base class; they are inherited here.
    """

    def _make_senddata(self, accessroute, requestdata):
        """Assemble a send frame and append the CR/LF terminator."""
        mc_data = super()._make_senddata(accessroute, requestdata)
        mc_data += const.CTRL_CODE_CR.to_bytes(1, "big")
        mc_data += const.CTRL_CODE_LF.to_bytes(1, "big")
        return mc_data
class Type1C:
    """MC protocol A-compatible 1C frame (serial) communication class."""

    def __init__(self, pkg_format, channel, station_no=0, pc_no=255):
        """
        Args:
            pkg_format: frame packager (PkgFormat subclass) that builds and
                parses the serial frames.
            channel: transport object exposing _send(bytes) and _recv() -> bytes.
            station_no(int): target station number.
            pc_no(int): target PC number (255 = directly connected CPU).
        """
        self.station_no = station_no
        self.pc_no = pc_no
        self.pkg_format = pkg_format
        self.channel = channel
        self.plctype = const.Q_SERIES
        self._wordsize = 4  # 1C frames are ASCII: one word = 4 hex characters

    def _make_commanddata(self, command):
        """Encode the 2-character ASCII command identifier."""
        return command.to_bytes(2, "big")

    def _make_messagewait(self, wait):
        """Encode the 1-character message wait factor."""
        return wait.to_bytes(1, "big")

    def _make_accessroute(self):
        """Encode the station number + PC number routing field."""
        route_data = bytes()
        route_data += self.pkg_format._encode_value(self.station_no, "byte")
        route_data += self.pkg_format._encode_value(self.pc_no, "byte")
        return route_data

    def _interpret_device(self, device):
        """Split a device name into its code and number parts.

        Args:
            device(str): device name. (ex: "D1000", "Y1")

        Returns:
            (str, str): device code (letters) and device number (digits).
        """
        devicecode = re.search(r"\D+", device).group(0)
        devicenum = re.search(r"\d.*", device).group(0)
        return (devicecode, devicenum)

    def _make_devicedata(self, device):
        """Encode a device name for a 1C request.

        Single-letter codes pad the number to 4 digits, two-letter codes
        to 3, so the field is always 5 characters long.
        """
        devicecode, devicenum = self._interpret_device(device)
        device_data = devicecode.encode()
        if len(devicecode) == 1:
            device_data += devicenum.rjust(4, "0").upper().encode()
        else:
            device_data += devicenum.rjust(3, "0").upper().encode()
        return device_data

    def _get_answerstatus_index(self):
        """Index of the completion status in a response frame."""
        return 6

    def batchread_wordunits(self, headdevice, readsize):
        """Batch read in word units.

        Args:
            headdevice(str): read head device. (ex: "D1000")
            readsize(int): number of words to read.

        Returns:
            (list[int], str|None): read values and an error string
            (None when the read succeeded).
        """
        self._currentcmd = const.BATCHREAD_WORDUNITS
        command = 22354  # ASCII "WR"
        message_wait = 51  # ASCII "3"
        request_data = bytes()
        request_data += self._make_commanddata(command)
        request_data += self._make_messagewait(message_wait)
        request_data += self._make_devicedata(headdevice)
        request_data += self.pkg_format._encode_value(readsize, "byte")
        send_data = self.pkg_format._make_senddata(self._make_accessroute(), request_data)
        self.channel._send(send_data)
        recv_data = self.channel._recv()
        check_res = self.pkg_format._check_cmdanswer(recv_data)
        if check_res:
            return ([], check_res)
        word_values = []
        data_index = self.pkg_format._get_answerdata_index()
        for _ in range(readsize):
            # decompiled source had the broken slice recv_data[data_index[:...]]
            wordvalue = self.pkg_format._decode_value(
                recv_data[data_index:data_index + self._wordsize], mode="short")
            word_values.append(wordvalue)
            data_index += self._wordsize
        return (word_values, None)

    def batchread_bitunits(self, headdevice, readsize):
        """Batch read in bit units.

        Args:
            headdevice(str): read head device. (ex: "Y1")
            readsize(int): number of bits to read.

        Returns:
            (list[int], str|None): bit values (0/1) and an error string.
        """
        self._currentcmd = const.BATCHREAD_BITUNITS
        command = 16978  # ASCII "BR"
        message_wait = 51
        request_data = bytes()
        request_data += self._make_commanddata(command)
        request_data += self._make_messagewait(message_wait)
        request_data += self._make_devicedata(headdevice)
        request_data += self.pkg_format._encode_value(readsize, "byte")
        send_data = self.pkg_format._make_senddata(self._make_accessroute(), request_data)
        self.channel._send(send_data)
        recv_data = self.channel._recv()
        check_res = self.pkg_format._check_cmdanswer(recv_data)
        if check_res:
            return ([], check_res)
        bit_values = []
        data_index = self.pkg_format._get_answerdata_index()
        byte_range = 1  # each bit is returned as one ASCII character
        for _ in range(readsize):
            bitvalue = int(recv_data[data_index:data_index + byte_range].decode())
            bit_values.append(bitvalue)
            data_index += byte_range
        return (bit_values, None)

    def batchwrite_wordunits(self, headdevice, values):
        """Batch write in word units.

        Args:
            headdevice(str): write head device. (ex: "D1000")
            values(list[int]): write values.

        Returns:
            (None, str|None): error string in the second slot, None on success.
        """
        write_size = len(values)
        self._currentcmd = const.BATCHWRITE_WORDUNITS
        command = 22359  # ASCII "WW"
        message_wait = 51
        request_data = bytes()
        request_data += self._make_commanddata(command)
        request_data += self._make_messagewait(message_wait)
        request_data += self._make_devicedata(headdevice)
        request_data += self.pkg_format._encode_value(write_size, "byte")
        for value in values:
            request_data += self.pkg_format._encode_value(value)
        send_data = self.pkg_format._make_senddata(self._make_accessroute(), request_data)
        self.channel._send(send_data)
        recv_data = self.channel._recv()
        check_res = self.pkg_format._check_cmdanswer(recv_data)
        # A write may legitimately get no echo, so "No response data" counts
        # as success (behavior kept from the original).
        if check_res and check_res != "No response data":
            return (None, check_res)
        return (None, None)

    def batchwrite_bitunits(self, headdevice, values):
        """Batch write in bit units.

        Args:
            headdevice(str): write head device. (ex: "Y1")
            values(list[int]): bit values, each 0 (OFF) or 1 (ON).

        Returns:
            (None, str|None): error string in the second slot, None on success.
        """
        write_size = len(values)
        for value in values:
            # decompiled condition `not value == 0 or value == 1` wrongly
            # raised for value 1; the intent is to allow exactly 0 and 1
            if value not in (0, 1):
                raise ValueError("Each value must be 0 or 1. 0 is OFF, 1 is ON.")
        self._currentcmd = const.BATCHWRITE_BITUNITS
        command = 16983  # ASCII "BW"
        message_wait = 51
        request_data = bytes()
        request_data += self._make_commanddata(command)
        request_data += self._make_messagewait(message_wait)
        request_data += self._make_devicedata(headdevice)
        request_data += self.pkg_format._encode_value(write_size, "byte")
        for value in values:
            request_data += str(value).encode()
        send_data = self.pkg_format._make_senddata(self._make_accessroute(), request_data)
        self.channel._send(send_data)
        recv_data = self.channel._recv()
        check_res = self.pkg_format._check_cmdanswer(recv_data)
        if check_res and check_res != "No response data":
            return (None, check_res)
        return (None, None)

    def remote_run(self):
        """Run the PLC remotely (command "RR"). Sends only; no response read."""
        command = 21074  # ASCII "RR"
        message_wait = 48  # ASCII "0"
        request_data = bytes()
        request_data += self._make_commanddata(command)
        request_data += self._make_messagewait(message_wait)
        send_data = self.pkg_format._make_senddata(self._make_accessroute(), request_data)
        self.channel._send(send_data)

    def remote_stop(self):
        """Stop the PLC remotely (command "RS"). Sends only; no response read."""
        command = 21075  # ASCII "RS"
        message_wait = 48
        request_data = bytes()
        request_data += self._make_commanddata(command)
        request_data += self._make_messagewait(message_wait)
        send_data = self.pkg_format._make_senddata(self._make_accessroute(), request_data)
        self.channel._send(send_data)

    def read_cputype(self):
        """Read the CPU type (command "PC").

        Returns:
            bytes: raw response frame (not parsed).
        """
        command = 20547  # ASCII "PC"
        message_wait = 48
        request_data = bytes()
        request_data += self._make_commanddata(command)
        request_data += self._make_messagewait(message_wait)
        send_data = self.pkg_format._make_senddata(self._make_accessroute(), request_data)
        self.channel._send(send_data)
        recv_data = self.channel._recv()
        return recv_data
class Type3C:
    """MC protocol QnA-compatible 3C frame (serial) communication class."""

    def __init__(self, pkg_format, channel, station_no=0, network_no=0, pc_no=255, self_station_no=0):
        """
        Args:
            pkg_format: frame packager (PkgFormat subclass).
            channel: transport exposing _send(bytes) and _recv() -> bytes.
            station_no(int): target station number.
            network_no(int): network number (0 = local).
            pc_no(int): PC number (255 = directly connected CPU).
            self_station_no(int): own station number.
        """
        self.station_no = station_no
        self.network_no = network_no
        self.pc_no = pc_no
        self.self_station_no = self_station_no
        self.pkg_format = pkg_format
        self.channel = channel
        self.plctype = const.Q_SERIES
        self._wordsize = 4  # ASCII frames: one word = 4 hex characters

    def set_plctype(self, plctype):
        """Set self.plctype from a PLC series name.

        Raises:
            ValueError: if the name is not a supported series.
        """
        series = {
            "Q": const.Q_SERIES,
            "L": const.L_SERIES,
            "QnA": const.QnA_SERIES,
            "iQ-L": const.iQL_SERIES,
            "iQ-R": const.iQR_SERIES,
        }
        try:
            self.plctype = series[plctype]
        except KeyError:
            raise ValueError("plctype must be 'Q', 'L', 'QnA', 'iQ-L' or 'iQ-R'") from None

    def _make_commanddata(self, command, subcommand):
        """Encode command + subcommand as two short fields."""
        command_data = bytes()
        command_data += self.pkg_format._encode_value(command, "short")
        command_data += self.pkg_format._encode_value(subcommand, "short")
        return command_data

    def _make_devicedata(self, device):
        """Encode a device (code + zero-padded number).

        iQ-R series uses 8-digit device numbers, other series 6.
        """
        device_data = bytes()
        devicecode, devicenum = self._interpret_device(device)
        device_data += devicecode.encode()
        if self.plctype is const.iQR_SERIES:
            device_data += devicenum.rjust(8, "0").upper().encode()
        else:
            device_data += devicenum.rjust(6, "0").upper().encode()
        return device_data

    def _make_accessroute(self):
        """Encode station / network / PC / own-station routing bytes."""
        route_data = bytes()
        route_data += self.pkg_format._encode_value(self.station_no, "byte")
        route_data += self.pkg_format._encode_value(self.network_no, "byte")
        route_data += self.pkg_format._encode_value(self.pc_no, "byte")
        route_data += self.pkg_format._encode_value(self.self_station_no, "byte")
        return route_data

    def _interpret_device(self, device):
        """Split a device name into its ASCII device code and number string."""
        devicetype = re.search(r"\D+", device).group(0)
        devicecode, _ = const.DeviceConstants.get_ascii_devicecode(self.plctype, devicetype)
        devicenum = re.search(r"\d.*", device).group(0)
        return (devicecode, devicenum)

    def _transport_data(self, send_data):
        """Send a frame and validate the response.

        Returns:
            (bytes|list, str|None): raw response and error string; on error
            the first element is an empty list (kept from the original API).
        """
        self.channel._send(send_data)
        recv_data = self.channel._recv()
        check_res = self.pkg_format._check_cmdanswer(recv_data)
        if check_res:
            return ([], check_res)
        return (recv_data, None)

    def batchread_wordunits(self, headdevice, readsize):
        """Batch read in word units.

        Returns:
            (list[int], str|None): read values and an error string.
        """
        self._currentcmd = const.BATCHREAD_WORDUNITS
        command = 1025  # 0x0401
        subcommand = 2 if self.plctype == const.iQR_SERIES else 0
        request_data = bytes()
        request_data += self._make_commanddata(command, subcommand)
        request_data += self._make_devicedata(headdevice)
        request_data += self.pkg_format._encode_value(readsize, "short")
        send_data = self.pkg_format._make_senddata(self._make_accessroute(), request_data)
        recv_data, error = self._transport_data(send_data)
        if error:
            return (recv_data, error)
        word_values = []
        data_index = self.pkg_format._get_answerdata_index()
        for _ in range(readsize):
            # decompiled source had the broken slice recv_data[data_index[:...]]
            wordvalue = self.pkg_format._decode_value(
                recv_data[data_index:data_index + self._wordsize], mode="short")
            word_values.append(wordvalue)
            data_index += self._wordsize
        return (word_values, None)

    def batchread_bitunits(self, headdevice, readsize):
        """Batch read in bit units.

        Returns:
            (list[int], str|None): bit values (0/1) and an error string.
        """
        self._currentcmd = const.BATCHREAD_BITUNITS
        command = 1025  # 0x0401
        subcommand = 3 if self.plctype == const.iQR_SERIES else 1
        request_data = bytes()
        request_data += self._make_commanddata(command, subcommand)
        request_data += self._make_devicedata(headdevice)
        request_data += self.pkg_format._encode_value(readsize, "short")
        send_data = self.pkg_format._make_senddata(self._make_accessroute(), request_data)
        recv_data, error = self._transport_data(send_data)
        if error:
            return (recv_data, error)
        bit_values = []
        data_index = self.pkg_format._get_answerdata_index()
        byte_range = 1  # each bit is returned as one ASCII character
        for _ in range(readsize):
            bitvalue = int(recv_data[data_index:data_index + byte_range].decode())
            bit_values.append(bitvalue)
            data_index += byte_range
        return (bit_values, None)

    def batchwrite_wordunits(self, headdevice, values):
        """Batch write in word units.

        Args:
            headdevice(str): Write head device. (ex: "D1000")
            values(list[int]): Write values.

        Returns:
            (None, str|None): error string in the second slot, None on success.
        """
        write_size = len(values)
        self._currentcmd = const.BATCHWRITE_WORDUNITS
        command = 5121  # 0x1401
        subcommand = 2 if self.plctype == const.iQR_SERIES else 0
        request_data = bytes()
        request_data += self._make_commanddata(command, subcommand)
        request_data += self._make_devicedata(headdevice)
        request_data += self.pkg_format._encode_value(write_size, "short")
        for value in values:
            request_data += self.pkg_format._encode_value(value)
        send_data = self.pkg_format._make_senddata(self._make_accessroute(), request_data)
        recv_data, error = self._transport_data(send_data)
        # A write may legitimately get no echo, so "No response data" counts
        # as success (behavior kept from the original).
        if error and error != "No response data":
            return (recv_data, error)
        return (None, None)

    def batchwrite_bitunits(self, headdevice, values):
        """Batch write in bit units.

        Args:
            headdevice(str): Write head device. (ex: "Y1")
            values(list[int]): bit values, each 0 (OFF) or 1 (ON).

        Returns:
            (None, str|None): error string in the second slot, None on success.
        """
        write_size = len(values)
        for value in values:
            # decompiled condition `not value == 0 or value == 1` wrongly
            # raised for value 1; the intent is to allow exactly 0 and 1
            if value not in (0, 1):
                raise ValueError("Each value must be 0 or 1. 0 is OFF, 1 is ON.")
        self._currentcmd = const.BATCHWRITE_BITUNITS
        command = 5121  # 0x1401
        subcommand = 3 if self.plctype == const.iQR_SERIES else 1
        request_data = bytes()
        request_data += self._make_commanddata(command, subcommand)
        request_data += self._make_devicedata(headdevice)
        request_data += self.pkg_format._encode_value(write_size, "short")
        for value in values:
            request_data += str(value).encode()
        send_data = self.pkg_format._make_senddata(self._make_accessroute(), request_data)
        recv_data, error = self._transport_data(send_data)
        if error and error != "No response data":
            return (recv_data, error)
        return (None, None)

    def remote_run(self, clear_mode, force_exec=False):
        """Run the PLC remotely.

        Args:
            clear_mode(int): 0 = no clear, 1 = clear except latch devices,
                2 = clear all.
            force_exec(bool): force execution even if another station holds
                control.
        """
        # decompiled validation ladder flattened to its evident intent
        if clear_mode not in (0, 1, 2):
            raise ValueError("clear_device must be 0, 1 or 2. 0: does not clear. 1: clear except latch device. 2: clear all.")
        if not isinstance(force_exec, bool):
            raise ValueError("force_exec must be True or False")
        command = 4097  # 0x1001
        subcommand = 0
        mode = 3 if force_exec else 1
        request_data = bytes()
        request_data += self._make_commanddata(command, subcommand)
        request_data += self.pkg_format._encode_value(mode, mode="short")
        request_data += self.pkg_format._encode_value(clear_mode, mode="byte")
        request_data += self.pkg_format._encode_value(0, mode="byte")
        send_data = self.pkg_format._make_senddata(self._make_accessroute(), request_data)
        self.channel._send(send_data)

    def remote_stop(self):
        """Stop the PLC remotely."""
        command = 4098  # 0x1002
        subcommand = 0
        request_data = bytes()
        request_data += self._make_commanddata(command, subcommand)
        request_data += self.pkg_format._encode_value(1, mode="short")
        send_data = self.pkg_format._make_senddata(self._make_accessroute(), request_data)
        self.channel._send(send_data)

    def remote_pause(self, force_exec=False):
        """Pause the PLC remotely.

        Args:
            force_exec(bool): force execution even if another station holds
                control.
        """
        if not isinstance(force_exec, bool):
            raise ValueError("force_exec must be True or False")
        command = 4099  # 0x1003
        subcommand = 0
        mode = 3 if force_exec else 1
        request_data = bytes()
        request_data += self._make_commanddata(command, subcommand)
        request_data += self.pkg_format._encode_value(mode, mode="short")
        send_data = self.pkg_format._make_senddata(self._make_accessroute(), request_data)
        self.channel._send(send_data)

    def remote_latchclear(self):
        """Clear latch devices remotely.

        PLC must be stopped when this command is used.
        """
        command = 4101  # 0x1005
        subcommand = 0
        request_data = bytes()
        request_data += self._make_commanddata(command, subcommand)
        request_data += self.pkg_format._encode_value(1, mode="short")
        send_data = self.pkg_format._make_senddata(self._make_accessroute(), request_data)
        self.channel._send(send_data)

    def remote_reset(self):
        """Reset the PLC remotely.

        PLC must be stopped when this command is used.
        """
        command = 4102  # 0x1006
        subcommand = 0
        request_data = bytes()
        request_data += self._make_commanddata(command, subcommand)
        request_data += self.pkg_format._encode_value(1, mode="short")
        send_data = self.pkg_format._make_senddata(self._make_accessroute(), request_data)
        self.channel._send(send_data)

    def read_cputype(self):
        """Read the CPU type.

        NOTE(review): the decompiled body called self._make_commanddata with a
        single argument and a nonexistent self._make_messagewait (copy-pasted
        from the 1C class) and could never run. Rebuilt with the QnA
        "read CPU model" command (0x0101), subcommand 0 -- confirm against
        the MC protocol reference manual.

        Returns:
            bytes: raw response frame (not parsed).
        """
        command = 257  # 0x0101
        subcommand = 0
        request_data = bytes()
        request_data += self._make_commanddata(command, subcommand)
        send_data = self.pkg_format._make_senddata(self._make_accessroute(), request_data)
        self.channel._send(send_data)
        recv_data = self.channel._recv()
        return recv_data
class Type3E:
    """mcprotocol QnA-compatible 3E frame (Ethernet) communication class.

    The trailing remote_pause method of the original file is not reproduced
    here (its body was truncated in the reviewed chunk).
    """

    def __init__(self, channel, network_no=0, pc_no=255, dest_moduleio=1023, dest_modulesta=0):
        """
        Args:
            channel: transport exposing _send(bytes) and _recv() -> bytes.
            network_no(int): network number (0 = local).
            pc_no(int): PC number (255 = directly connected CPU).
            dest_moduleio(int): destination module I/O number (1023 = 0x3FF, CPU).
            dest_modulesta(int): destination module station number.
        """
        self.channel = channel
        self.network_no = network_no
        self.pc_no = pc_no
        self.dest_moduleio = dest_moduleio
        self.dest_modulesta = dest_modulesta
        self.subheader = 20480  # 0x5000: 3E-frame request subheader
        self.plctype = const.Q_SERIES
        self.commtype = const.COMMTYPE_BINARY
        self._wordsize = 2  # bytes per word in binary mode (4 in ascii mode)
        self.timer = 4  # CPU monitoring timer, 250 ms units (= 1 s)

    def _interpret_device(self, device):
        """Split a device name ("D1000") into device code and number.

        In binary mode the number is parsed in the device's own base and both
        results are ints; in ascii mode both parts stay strings.
        """
        devicetype = re.search(r"\D+", device).group(0)
        if self.commtype == const.COMMTYPE_BINARY:
            devicecode, devicebase = const.DeviceConstants.get_binary_devicecode(self.plctype, devicetype)
            devicenum = int(re.search(r"\d.*", device).group(0), devicebase)
        else:
            devicecode, _ = const.DeviceConstants.get_ascii_devicecode(self.plctype, devicetype)
            devicenum = re.search(r"\d.*", device).group(0)
        return (devicecode, devicenum)

    def _set_plctype(self, plctype):
        """Set self.plctype from a PLC series name; raise ValueError if unknown."""
        series = {
            "Q": const.Q_SERIES,
            "L": const.L_SERIES,
            "QnA": const.QnA_SERIES,
            "iQ-L": const.iQL_SERIES,
            "iQ-R": const.iQR_SERIES,
        }
        try:
            self.plctype = series[plctype]
        except KeyError:
            raise ValueError("plctype must be 'Q', 'L', 'QnA', 'iQ-L' or 'iQ-R'") from None

    def _set_commtype(self, commtype):
        """Set communication type ('binary' or 'ascii') and matching word size."""
        if commtype == "binary":
            self.commtype = const.COMMTYPE_BINARY
            self._wordsize = 2
        elif commtype == "ascii":
            self.commtype = const.COMMTYPE_ASCII
            self._wordsize = 4
        else:
            raise ValueError("communication type must be 'binary' or 'ascii'")

    def _get_answerdata_index(self):
        """Offset of the first answer-data byte in a response frame."""
        return 11 if self.commtype == const.COMMTYPE_BINARY else 22

    def _get_answerstatus_index(self):
        """Offset of the completion status word in a response frame."""
        return 9 if self.commtype == const.COMMTYPE_BINARY else 18

    def setaccessopt(self, commtype=None, network=None, pc=None, dest_moduleio=None, dest_modulesta=None, timer_sec=None):
        """Set MC protocol access options; every argument is optional.

        NOTE(review): the decompiled body chained some of these checks with
        elif, which silently skipped later options when an earlier one was
        given; they are validated independently here.
        """
        if commtype:
            self._set_commtype(commtype)
        if network:
            try:
                network.to_bytes(1, "little")  # range check only
                self.network_no = network
            except (OverflowError, AttributeError):
                raise ValueError("network must be 0 <= network <= 255")
        if pc:
            try:
                pc.to_bytes(1, "little")
                self.pc_no = pc
            except (OverflowError, AttributeError):
                raise ValueError("pc must be 0 <= pc <= 255")
        if dest_moduleio:
            try:
                dest_moduleio.to_bytes(2, "little")
                self.dest_moduleio = dest_moduleio
            except (OverflowError, AttributeError):
                raise ValueError("dest_moduleio must be 0 <= dest_moduleio <= 65535")
        if dest_modulesta:
            try:
                dest_modulesta.to_bytes(1, "little")
                self.dest_modulesta = dest_modulesta
            except (OverflowError, AttributeError):
                raise ValueError("dest_modulesta must be 0 <= dest_modulesta <= 255")
        if timer_sec:
            try:
                timer_250msec = 4 * timer_sec  # protocol timer unit is 250 ms
                timer_250msec.to_bytes(2, "little")
                self.timer = timer_250msec
            except (OverflowError, AttributeError):
                raise ValueError("timer_sec must be 0 <= timer_sec <= 16383, / sec")

    def _make_senddata(self, requestdata):
        """Prepend the 3E-frame header (subheader, route, length, timer)."""
        mc_data = bytes()
        if self.commtype == const.COMMTYPE_BINARY:
            mc_data += self.subheader.to_bytes(2, "big")
        else:
            mc_data += format(self.subheader, "x").ljust(4, "0").upper().encode()
        mc_data += self._encode_value(self.network_no, "byte")
        mc_data += self._encode_value(self.pc_no, "byte")
        mc_data += self._encode_value(self.dest_moduleio, "short")
        mc_data += self._encode_value(self.dest_modulesta, "byte")
        # data length field counts the timer field plus the request payload
        mc_data += self._encode_value(self._wordsize + len(requestdata), "short")
        mc_data += self._encode_value(self.timer, "short")
        mc_data += requestdata
        return mc_data

    def _make_commanddata(self, command, subcommand):
        """Encode command + subcommand as two short fields."""
        command_data = bytes()
        command_data += self._encode_value(command, "short")
        command_data += self._encode_value(subcommand, "short")
        return command_data

    def _make_devicedata(self, device):
        """Encode a device (code and number) per commtype and PLC series."""
        device_data = bytes()
        devicecode, devicenum = self._interpret_device(device)
        if self.commtype is const.COMMTYPE_BINARY:
            if self.plctype is const.iQR_SERIES:
                device_data += devicenum.to_bytes(4, "little")
                device_data += devicecode.to_bytes(2, "little")
            else:
                device_data += devicenum.to_bytes(3, "little")
                device_data += devicecode.to_bytes(1, "little")
        elif self.plctype is const.iQR_SERIES:
            device_data += devicecode.encode()
            device_data += devicenum.rjust(8, "0").upper().encode()
        else:
            device_data += devicecode.encode()
            device_data += devicenum.rjust(6, "0").upper().encode()
        return device_data

    def _encode_value(self, value, mode='short', isSigned=False):
        """Encode an int as protocol bytes.

        Binary mode: little-endian. ASCII mode: unsigned uppercase hex.
        Raises ValueError when the value does not fit the field.

        NOTE(review): the decompiled source contained the Python 2 literal
        4294967295L (a SyntaxError in Python 3); the mask is derived from the
        field size here.
        """
        try:
            size = {"byte": 1, "short": 2, "long": 4}[mode]
            if self.commtype == const.COMMTYPE_BINARY:
                return value.to_bytes(size, "little", signed=isSigned)
            value.to_bytes(size, "little", signed=isSigned)  # range check only
            value &= (1 << (8 * size)) - 1  # 0xFF / 0xFFFF / 0xFFFFFFFF
            return format(value, "x").rjust(2 * size, "0").upper().encode()
        except Exception as exc:
            raise ValueError("Exceeeded Device value range") from exc

    def _encode_devicevalue(self, value, device, mode='short'):
        """Encode a write value for *device* (always signed)."""
        devicename = re.search(r"\D+", device).group(0)
        # the lookup validates the device name; its result is not needed here
        const.DeviceConstants.get_devicetype(self.plctype, devicename)
        try:
            size = {"byte": 1, "short": 2, "long": 4}[mode]
            if self.commtype == const.COMMTYPE_BINARY:
                return value.to_bytes(size, "little", signed=True)
            value.to_bytes(size, "little", signed=True)  # range check only
            value &= (1 << (8 * size)) - 1
            return format(value, "x").rjust(2 * size, "0").upper().encode()
        except Exception as exc:
            raise ValueError("Exceeeded Device value range") from exc

    def _decode_value(self, byte, mode='short', isSigned=False):
        """Decode protocol bytes back to an int."""
        try:
            if self.commtype == const.COMMTYPE_BINARY:
                return int.from_bytes(byte, "little", signed=isSigned)
            value = int(byte.decode(), 16)
            if isSigned:
                value = twos_comp(value, mode)
            return value
        except Exception as exc:
            raise ValueError("Could not decode byte to value") from exc

    def _decode_devicevalue(self, byte, device, mode='short'):
        """Decode device data bytes to a signed int.

        Binary-mode bit devices arrive big-endian per word, so their bytes
        are reversed before decoding.
        (decompiled source had broken slices like byte[1[:2]])
        """
        devicename = re.search(r"\D+", device).group(0)
        devicetype = const.DeviceConstants.get_devicetype(self.plctype, devicename)
        try:
            if self.commtype == const.COMMTYPE_BINARY:
                if devicetype == const.DeviceConstants.BIT_DEVICE:
                    if mode == "byte":
                        value = int.from_bytes(byte, "big", signed=True)
                    elif mode == "short":
                        value = int.from_bytes(byte[1:2] + byte[0:1], "big", signed=True)
                    elif mode == "long":
                        value = int.from_bytes(
                            byte[3:4] + byte[2:3] + byte[1:2] + byte[0:1],
                            "big", signed=True)
                    else:
                        raise ValueError("Please input value type")
                else:
                    value = int.from_bytes(byte, "little", signed=True)
            else:
                value = int(byte.decode(), 16)
                value = twos_comp(value, mode)
            return value
        except Exception as exc:
            raise ValueError("Could not decode byte to value") from exc

    def _check_cmdanswer(self, recv_data):
        """Check the command answer; raise if the completion status is nonzero.

        NOTE(review): the decompiled body computed the status and then dropped
        it; its docstring says a nonzero status should raise, so that behavior
        is restored here. The original exception type cannot be recovered from
        the decompiled code -- confirm against the pre-compilation source.
        """
        answerstatus_index = self._get_answerstatus_index()
        answerstatus = self._decode_value(
            recv_data[answerstatus_index:answerstatus_index + self._wordsize], "short")
        if answerstatus != 0:
            raise ValueError("MC protocol error: status 0x%04X" % answerstatus)

    def batchread_wordunits(self, headdevice, readsize):
        """Batch read in word units.

        Returns:
            (list[int], None): read values (second slot kept for API parity).
        """
        self._currentcmd = const.BATCHREAD_WORDUNITS
        command = 1025  # 0x0401
        subcommand = 2 if self.plctype == const.iQR_SERIES else 0
        request_data = bytes()
        request_data += self._make_commanddata(command, subcommand)
        request_data += self._make_devicedata(headdevice)
        request_data += self._encode_value(readsize)
        send_data = self._make_senddata(request_data)
        self.channel._send(send_data)
        recv_data = self.channel._recv()
        self._check_cmdanswer(recv_data)
        word_values = []
        data_index = self._get_answerdata_index()
        for _ in range(readsize):
            # decompiled source had the broken slice recv_data[data_index[:...]]
            word_values.append(self._decode_devicevalue(
                recv_data[data_index:data_index + self._wordsize],
                headdevice, mode="short"))
            data_index += self._wordsize
        return (word_values, None)

    def batchread_bitunits(self, headdevice, readsize):
        """Batch read in bit units.

        Returns:
            (list[int], None): bit values (0/1).
        """
        self._currentcmd = const.BATCHREAD_BITUNITS
        command = 1025  # 0x0401
        subcommand = 3 if self.plctype == const.iQR_SERIES else 1
        request_data = bytes()
        request_data += self._make_commanddata(command, subcommand)
        request_data += self._make_devicedata(headdevice)
        request_data += self._encode_value(readsize)
        send_data = self._make_senddata(request_data)
        self.channel._send(send_data)
        recv_data = self.channel._recv()
        self._check_cmdanswer(recv_data)
        bit_values = []
        if self.commtype == const.COMMTYPE_BINARY:
            # binary mode packs two bits per byte: high nibble first
            for i in range(readsize):
                data_index = i // 2 + self._get_answerdata_index()
                value = int.from_bytes(recv_data[data_index:data_index + 1], "little")
                if i % 2 == 0:
                    bitvalue = 1 if value & 16 else 0
                else:
                    bitvalue = 1 if value & 1 else 0
                bit_values.append(bitvalue)
        else:
            data_index = self._get_answerdata_index()
            byte_range = 1  # one ASCII character per bit
            for _ in range(readsize):
                bitvalue = int(recv_data[data_index:data_index + byte_range].decode())
                bit_values.append(bitvalue)
                data_index += byte_range
        return (bit_values, None)

    def batchwrite_wordunits(self, headdevice, values):
        """Batch write in word units.

        Args:
            headdevice(str): write head device. (ex: "D1000")
            values(list[int]): write values.
        """
        write_size = len(values)
        self._currentcmd = const.BATCHWRITE_WORDUNITS
        command = 5121  # 0x1401
        subcommand = 2 if self.plctype == const.iQR_SERIES else 0
        request_data = bytes()
        request_data += self._make_commanddata(command, subcommand)
        request_data += self._make_devicedata(headdevice)
        request_data += self._encode_value(write_size)
        for value in values:
            request_data += self._encode_devicevalue(value, headdevice)
        send_data = self._make_senddata(request_data)
        self.channel._send(send_data)
        recv_data = self.channel._recv()
        self._check_cmdanswer(recv_data)
        return (None, None)

    def batchwrite_bitunits(self, headdevice, values):
        """Batch write in bit units.

        Args:
            headdevice(str): write head device. (ex: "Y1")
            values(list[int]): bit values, each 0 (OFF) or 1 (ON).
        """
        write_size = len(values)
        for value in values:
            # decompiled condition `not value == 0 or value == 1` wrongly
            # raised for value 1; the intent is to allow exactly 0 and 1
            if value not in (0, 1):
                raise ValueError("Each value must be 0 or 1. 0 is OFF, 1 is ON.")
        self._currentcmd = const.BATCHWRITE_BITUNITS
        command = 5121  # 0x1401
        subcommand = 3 if self.plctype == const.iQR_SERIES else 1
        request_data = bytes()
        request_data += self._make_commanddata(command, subcommand)
        request_data += self._make_devicedata(headdevice)
        request_data += self._encode_value(write_size)
        if self.commtype == const.COMMTYPE_BINARY:
            # pack two bits per byte: even index -> high nibble, odd -> low
            bit_data = [0 for _ in range((len(values) + 1) // 2)]
            for index, value in enumerate(values):
                value_index = index // 2
                bit_index = 4 if index % 2 == 0 else 0
                bit_data[value_index] |= value << bit_index
            request_data += bytes(bit_data)
        else:
            for value in values:
                request_data += str(value).encode()
        send_data = self._make_senddata(request_data)
        self.channel._send(send_data)
        recv_data = self.channel._recv()
        self._check_cmdanswer(recv_data)
        return (None, None)

    def randomread(self, word_devices, dword_devices):
        """Read word and double-word devices at arbitrary addresses.

        Returns:
            (list[int], list[int]): word values and dword values.
        """
        self._currentcmd = const.RANDOMREAD
        command = 1027  # 0x0403
        subcommand = 2 if self.plctype == const.iQR_SERIES else 0
        word_size = len(word_devices)
        dword_size = len(dword_devices)
        request_data = bytes()
        request_data += self._make_commanddata(command, subcommand)
        request_data += self._encode_value(word_size, mode="byte")
        request_data += self._encode_value(dword_size, mode="byte")
        for word_device in word_devices:
            request_data += self._make_devicedata(word_device)
        for dword_device in dword_devices:
            request_data += self._make_devicedata(dword_device)
        send_data = self._make_senddata(request_data)
        self.channel._send(send_data)
        recv_data = self.channel._recv()
        self._check_cmdanswer(recv_data)
        data_index = self._get_answerdata_index()
        word_values = []
        dword_values = []
        for word_device in word_devices:
            word_values.append(self._decode_devicevalue(
                recv_data[data_index:data_index + self._wordsize],
                word_device, mode="short"))
            data_index += self._wordsize
        for dword_device in dword_devices:
            dword_values.append(self._decode_devicevalue(
                recv_data[data_index:data_index + self._wordsize * 2],
                dword_device, mode="long"))
            data_index += self._wordsize * 2
        return (word_values, dword_values)

    def randomwrite(self, word_devices, word_values, dword_devices, dword_values):
        """Write word and double-word devices at arbitrary addresses."""
        if len(word_devices) != len(word_values):
            raise ValueError("word_devices and word_values must be same length")
        if len(dword_devices) != len(dword_values):
            raise ValueError("dword_devices and dword_values must be same length")
        word_size = len(word_devices)
        dword_size = len(dword_devices)
        self._currentcmd = const.RANDOMWRITE
        command = 5122  # 0x1402
        subcommand = 2 if self.plctype == const.iQR_SERIES else 0
        request_data = bytes()
        request_data += self._make_commanddata(command, subcommand)
        request_data += self._encode_value(word_size, mode="byte")
        request_data += self._encode_value(dword_size, mode="byte")
        for word_device, word_value in zip(word_devices, word_values):
            request_data += self._make_devicedata(word_device)
            request_data += self._encode_devicevalue(word_value, word_device, mode="short")
        for dword_device, dword_value in zip(dword_devices, dword_values):
            request_data += self._make_devicedata(dword_device)
            request_data += self._encode_devicevalue(dword_value, dword_device, mode="long")
        send_data = self._make_senddata(request_data)
        self.channel._send(send_data)
        recv_data = self.channel._recv()
        self._check_cmdanswer(recv_data)

    def randomwrite_bitunits(self, bit_devices, values):
        """Write bit devices at arbitrary addresses."""
        if len(bit_devices) != len(values):
            raise ValueError("bit_devices and values must be same length")
        write_size = len(values)
        for value in values:
            # decompiled condition `not value == 0 or value == 1` wrongly
            # raised for value 1; the intent is to allow exactly 0 and 1
            if value not in (0, 1):
                raise ValueError("Each value must be 0 or 1. 0 is OFF, 1 is ON.")
        self._currentcmd = const.RANDOMWRITE_BITUNITS
        command = 5122  # 0x1402
        subcommand = 3 if self.plctype == const.iQR_SERIES else 1
        request_data = bytes()
        request_data += self._make_commanddata(command, subcommand)
        request_data += self._encode_value(write_size, mode="byte")
        for bit_device, value in zip(bit_devices, values):
            request_data += self._make_devicedata(bit_device)
            request_data += self._encode_devicevalue(value, bit_device, mode="byte")
        send_data = self._make_senddata(request_data)
        self.channel._send(send_data)
        recv_data = self.channel._recv()
        self._check_cmdanswer(recv_data)

    def remote_run(self, clear_mode, force_exec=False):
        """Run the PLC remotely.

        Args:
            clear_mode(int): 0 = no clear, 1 = clear except latch devices,
                2 = clear all.
            force_exec(bool): force execution even if another station holds
                control.
        """
        # decompiled validation ladder flattened to its evident intent
        if clear_mode not in (0, 1, 2):
            raise ValueError("clear_device must be 0, 1 or 2. 0: does not clear. 1: clear except latch device. 2: clear all.")
        if not isinstance(force_exec, bool):
            raise ValueError("force_exec must be True or False")
        command = 4097  # 0x1001
        subcommand = 0
        mode = 3 if force_exec else 1
        request_data = bytes()
        request_data += self._make_commanddata(command, subcommand)
        request_data += self._encode_value(mode, mode="short")
        request_data += self._encode_value(clear_mode, mode="byte")
        request_data += self._encode_value(0, mode="byte")
        send_data = self._make_senddata(request_data)
        self.channel._send(send_data)
        recv_data = self.channel._recv()
        self._check_cmdanswer(recv_data)

    def remote_stop(self):
        """Stop the PLC remotely."""
        command = 4098  # 0x1002
        subcommand = 0
        request_data = bytes()
        request_data += self._make_commanddata(command, subcommand)
        request_data += self._encode_value(1, mode="short")
        send_data = self._make_senddata(request_data)
        self.channel._send(send_data)
        recv_data = self.channel._recv()
        self._check_cmdanswer(recv_data)
+ """ + if not force_exec is True: + if not force_exec is False: + raise ValueError("force_exec must be True or False") + else: + command = 4099 + subcommand = 0 + if force_exec: + mode = 3 + else: + mode = 1 + request_data = bytes() + request_data += self._make_commanddata(command, subcommand) + request_data += self._encode_value(mode, mode="short") + send_data = self._make_senddata(request_data) + self.channel._send(send_data) + recv_data = self.channel._recv() + self._check_cmdanswer(recv_data) + + def remote_latchclear(self): + """Clear latch remotely. + PLC must be stop when use this command. + """ + command = 4101 + subcommand = 0 + request_data = bytes() + request_data += self._make_commanddata(command, subcommand) + request_data += self._encode_value(1, mode="short") + send_data = self._make_senddata(request_data) + self.channel._send(send_data) + recv_data = self.channel._recv() + self._check_cmdanswer(recv_data) + + def remote_reset(self): + """Reset remotely. + PLC must be stop when use this command. 
+ """ + command = 4102 + subcommand = 0 + request_data = bytes() + request_data += self._make_commanddata(command, subcommand) + request_data += self._encode_value(1, mode="short") + send_data = self._make_senddata(request_data) + self.channel._send(send_data) + try: + recv_data = self.channel._recv() + self._check_cmdanswer(recv_data) + except: + pass + + def read_cputype(self): + """Read CPU type + """ + command = 257 + subcommand = 0 + request_data = bytes() + request_data += self._make_commanddata(command, subcommand) + send_data = self._make_senddata(request_data) + self.channel._send(send_data) + recv_data = self.channel._recv() + self._check_cmdanswer(recv_data) + data_index = self._get_answerdata_index() + cpu_name_length = 16 + if self.commtype == const.COMMTYPE_BINARY: + cpu_type = recv_data[data_index[:data_index + cpu_name_length]].decode() + cpu_type = cpu_type.replace(" ", "") + cpu_code = int.from_bytes(recv_data[(data_index + cpu_name_length)[:None]], "little") + cpu_code = format(cpu_code, "x").rjust(4, "0") + else: + cpu_type = recv_data[data_index[:data_index + cpu_name_length]].decode() + cpu_type = cpu_type.replace(" ", "") + cpu_code = recv_data[(data_index + cpu_name_length)[:None]].decode() + return ( + cpu_type, cpu_code) + + def remote_unlock(self, password='', request_input=False): + """Unlock PLC by inputting password. 
+ """ + if request_input: + password = input("Please enter password\n") + elif password.isascii() is False: + raise ValueError("password must be only ascii code") + elif self.plctype is const.iQR_SERIES: + if not 6 <= len(password) <= 32: + raise ValueError("password length must be from 6 to 32") + elif not 4 == len(password): + raise ValueError("password length must be 4") + command = 5680 + subcommand = 0 + request_data = bytes() + request_data += self._make_commanddata(command, subcommand) + request_data += self._encode_value((len(password)), mode="short") + request_data += password.encode() + send_data = self._make_senddata(request_data) + self.channel._send(send_data) + recv_data = self.channel._recv() + self._check_cmdanswer(recv_data) + + def remote_lock(self, password='', request_input=False): + """Lock PLC by inputting password. + """ + if request_input: + password = input("Please enter password\n") + elif password.isascii() is False: + raise ValueError("password must be only ascii code") + elif self.plctype is const.iQR_SERIES: + if not 6 <= len(password) <= 32: + raise ValueError("password length must be from 6 to 32") + elif not 4 == len(password): + raise ValueError("password length must be 4") + command = 5681 + subcommand = 0 + request_data = bytes() + request_data += self._make_commanddata(command, subcommand) + request_data += self._encode_value((len(password)), mode="short") + request_data += password.encode() + send_data = self._make_senddata(request_data) + self.channel._send(send_data) + recv_data = self.channel._recv() + self._check_cmdanswer(recv_data) + + def echo_test(self, send_data): + """Do echo test. + Send data and answer data should be same. 
+ """ + if send_data.isascii() is False: + raise ValueError("send_data must be only ascii code") + if not 1 <= len(send_data) <= 960: + raise ValueError("send_data length must be from 1 to 960") + command = 1561 + subcommand = 0 + request_data = bytes() + request_data += self._make_commanddata(command, subcommand) + request_data += self._encode_value((len(send_data)), mode="short") + request_data += send_data.encode() + send_data = self._make_senddata(request_data) + self.channel._send(send_data) + recv_data = self.channel._recv() + self._check_cmdanswer(recv_data) + data_index = self._get_answerdata_index() + answer_len = self._decode_value((recv_data[data_index[:data_index + self._wordsize]]), mode="short") + answer = recv_data[(data_index + self._wordsize)[:None]].decode() + return (answer_len, answer) diff --git a/APPS_UNCOMPILED/lib/mcprotocol/type4c.py b/APPS_UNCOMPILED/lib/mcprotocol/type4c.py new file mode 100644 index 0000000..18e171b --- /dev/null +++ b/APPS_UNCOMPILED/lib/mcprotocol/type4c.py @@ -0,0 +1,31 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/mcprotocol/type4c.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 1067 bytes +""" +mcprotocol +Created on 2021/3/25 +@author: Lius +""" +from .mcformat import * +from .type3c import Type3C + +class Type4C(Type3C): + + def __init__(self, pkg_format, channel, station_no=0, network_no=0, pc_no=255, self_station_no=0, dest_moduleio=1023, dest_modulesta=0): + super().__init__(pkg_format, channel, station_no, network_no, pc_no, self_station_no) + self.dest_moduleio = dest_moduleio + self.dest_modulesta = dest_modulesta + + def _make_accessroute(self): + route_data = bytes() + route_data += self.pkg_format._encode_value(self.station_no, "byte") + route_data += 
self.pkg_format._encode_value(self.network_no, "byte") + route_data += self.pkg_format._encode_value(self.pc_no, "byte") + route_data += self.pkg_format._encode_value(self.dest_moduleio, "short") + route_data += self.pkg_format._encode_value(self.dest_modulesta, "byte") + route_data += self.pkg_format._encode_value(self.self_station_no, "byte") + return route_data diff --git a/APPS_UNCOMPILED/lib/mcprotocol/type4e.py b/APPS_UNCOMPILED/lib/mcprotocol/type4e.py new file mode 100644 index 0000000..ce4726a --- /dev/null +++ b/APPS_UNCOMPILED/lib/mcprotocol/type4e.py @@ -0,0 +1,61 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/mcprotocol/type4e.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 2154 bytes +""" +mcprotocol +Created on 2021/3/25 +@author: Lius +""" +from . import mcconst as const +from .type3e import Type3E + +class Type4E(Type3E): + __doc__ = "mcprotocol 4E communication class.\n Type 4e is almost same to Type 3E. Difference is only subheader.\n So, Changed self.subhear and self._make_senddata()\n " + subheader = 21504 + subheaderserial = 0 + + def set_subheaderserial(self, subheaderserial): + """Change subheader serial + """ + if 0 <= subheaderserial <= 65535: + self.subheaderserial = subheaderserial + else: + raise ValueError("subheaderserial must be 0 <= subheaderserial <= 65535") + + def _get_answerdata_index(self): + """Get answer data index from return data byte. + 4e type's data index is defferent from 3e type's. + """ + if self.commtype == const.COMMTYPE_BINARY: + return 15 + return 30 + + def _get_answerstatus_index(self): + """Get command status index from return data byte. + """ + if self.commtype == const.COMMTYPE_BINARY: + return 13 + return 26 + + def _make_senddata(self, requestdata): + """Makes send mc protorocl data. 
+ """ + mc_data = bytes() + if self.commtype == const.COMMTYPE_BINARY: + mc_data += self.subheader.to_bytes(2, "big") + else: + mc_data += format(self.subheader, "x").ljust(4, "0").upper().encode() + mc_data += self._encode_value(self.subheaderserial, "short") + mc_data += self._encode_value(0, "short") + mc_data += self._encode_value(self.network, "byte") + mc_data += self._encode_value(self.pc, "byte") + mc_data += self._encode_value(self.dest_moduleio, "short") + mc_data += self._encode_value(self.dest_modulesta, "byte") + mc_data += self._encode_value(self._wordsize + len(requestdata), "short") + mc_data += self._encode_value(self.timer, "short") + mc_data += requestdata + return mc_data diff --git a/APPS_UNCOMPILED/lib/opcua/__init__.py b/APPS_UNCOMPILED/lib/opcua/__init__.py new file mode 100644 index 0000000..f529417 --- /dev/null +++ b/APPS_UNCOMPILED/lib/opcua/__init__.py @@ -0,0 +1,18 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/opcua/__init__.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 396 bytes +""" +Pure Python OPC-UA library +""" +from opcua.common.node import Node +from opcua.common.methods import uamethod +from opcua.common.subscription import Subscription +from opcua.client.client import Client +from opcua.server.server import Server +from opcua.server.event_generator import EventGenerator +import opcua.common.instantiate as instantiate +import opcua.common.copy_node as copy_node diff --git a/APPS_UNCOMPILED/lib/opcua/compat.py b/APPS_UNCOMPILED/lib/opcua/compat.py new file mode 100644 index 0000000..e4cce14 --- /dev/null +++ b/APPS_UNCOMPILED/lib/opcua/compat.py @@ -0,0 +1,14 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 
14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/opcua/compat.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 232 bytes +""" Module with Python 2/3 compatibility functions. """ + +def with_metaclass(Meta, *bases): + """ Allows to specify metaclasses in Python 2 and 3 compatible ways. + Might not allow + """ + return Meta("Meta", bases, {}) diff --git a/APPS_UNCOMPILED/lib/opcua/crypto/__init__.py b/APPS_UNCOMPILED/lib/opcua/crypto/__init__.py new file mode 100644 index 0000000..cf1f290 --- /dev/null +++ b/APPS_UNCOMPILED/lib/opcua/crypto/__init__.py @@ -0,0 +1,7 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/opcua/crypto/__init__.py +# Compiled at: 2024-04-18 03:12:55 +pass diff --git a/APPS_UNCOMPILED/lib/opcua/crypto/security_policies.py b/APPS_UNCOMPILED/lib/opcua/crypto/security_policies.py new file mode 100644 index 0000000..c47c419 --- /dev/null +++ b/APPS_UNCOMPILED/lib/opcua/crypto/security_policies.py @@ -0,0 +1,501 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/opcua/crypto/security_policies.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 20002 bytes +import logging +from abc import ABCMeta, abstractmethod +from opcua.ua import CryptographyNone, SecurityPolicy +from opcua.ua import MessageSecurityMode +from opcua.ua import UaError +try: + from opcua.crypto import uacrypto + CRYPTOGRAPHY_AVAILABLE = True +except ImportError: + CRYPTOGRAPHY_AVAILABLE = False + +POLICY_NONE_URI = "http://opcfoundation.org/UA/SecurityPolicy#None" + +def require_cryptography(obj): + """ + Raise exception if 
cryptography module is not available. + Call this function in constructors. + """ + if not CRYPTOGRAPHY_AVAILABLE: + raise UaError("Can't use {0}, cryptography module is not installed".format(obj.__class__.__name__)) + + +class Signer(object): + __doc__ = "\n Abstract base class for cryptographic signature algorithm\n " + __metaclass__ = ABCMeta + + @abstractmethod + def signature_size(self): + pass + + @abstractmethod + def signature(self, data): + pass + + +class Verifier(object): + __doc__ = "\n Abstract base class for cryptographic signature verification\n " + __metaclass__ = ABCMeta + + @abstractmethod + def signature_size(self): + pass + + @abstractmethod + def verify(self, data, signature): + pass + + +class Encryptor(object): + __doc__ = "\n Abstract base class for encryption algorithm\n " + __metaclass__ = ABCMeta + + @abstractmethod + def plain_block_size(self): + pass + + @abstractmethod + def encrypted_block_size(self): + pass + + @abstractmethod + def encrypt(self, data): + pass + + +class Decryptor(object): + __doc__ = "\n Abstract base class for decryption algorithm\n " + __metaclass__ = ABCMeta + + @abstractmethod + def plain_block_size(self): + pass + + @abstractmethod + def encrypted_block_size(self): + pass + + @abstractmethod + def decrypt(self, data): + pass + + +class Cryptography(CryptographyNone): + __doc__ = "\n Security policy: Sign or SignAndEncrypt\n " + + def __init__(self, mode=MessageSecurityMode.Sign): + self.Signer = None + self.Verifier = None + self.Encryptor = None + self.Decryptor = None + assert mode in (MessageSecurityMode.Sign, + MessageSecurityMode.SignAndEncrypt) + self.is_encrypted = mode == MessageSecurityMode.SignAndEncrypt + + def plain_block_size(self): + """ + Size of plain text block for block cipher. + """ + if self.is_encrypted: + return self.Encryptor.plain_block_size() + return 1 + + def encrypted_block_size(self): + """ + Size of encrypted text block for block cipher. 
+ """ + if self.is_encrypted: + return self.Encryptor.encrypted_block_size() + return 1 + + def padding(self, size): + """ + Create padding for a block of given size. + plain_size = size + len(padding) + signature_size() + plain_size = N * plain_block_size() + """ + if not self.is_encrypted: + return b'' + block_size = self.Encryptor.plain_block_size() + rem = (size + self.signature_size() + 1) % block_size + if rem != 0: + rem = block_size - rem + return bytes(bytearray([rem])) * (rem + 1) + + def min_padding_size(self): + if self.is_encrypted: + return 1 + return 0 + + def signature_size(self): + return self.Signer.signature_size() + + def signature(self, data): + return self.Signer.signature(data) + + def vsignature_size(self): + return self.Verifier.signature_size() + + def verify(self, data, sig): + self.Verifier.verify(data, sig) + + def encrypt(self, data): + if self.is_encrypted: + assert len(data) % self.Encryptor.plain_block_size() == 0 + return self.Encryptor.encrypt(data) + return data + + def decrypt(self, data): + if self.is_encrypted: + return self.Decryptor.decrypt(data) + return data + + def remove_padding(self, data): + if self.is_encrypted: + pad_size = bytearray(data[(-1)[:None]])[0] + 1 + return data[None[:-pad_size]] + return data + + +class SignerRsa(Signer): + + def __init__(self, client_pk): + require_cryptography(self) + self.client_pk = client_pk + self.key_size = self.client_pk.key_size // 8 + + def signature_size(self): + return self.key_size + + def signature(self, data): + return uacrypto.sign_sha1(self.client_pk, data) + + +class VerifierRsa(Verifier): + + def __init__(self, server_cert): + require_cryptography(self) + self.server_cert = server_cert + self.key_size = self.server_cert.public_key().key_size // 8 + + def signature_size(self): + return self.key_size + + def verify(self, data, signature): + uacrypto.verify_sha1(self.server_cert, data, signature) + + +class EncryptorRsa(Encryptor): + + def __init__(self, server_cert, 
enc_fn, padding_size): + require_cryptography(self) + self.server_cert = server_cert + self.key_size = self.server_cert.public_key().key_size // 8 + self.encryptor = enc_fn + self.padding_size = padding_size + + def plain_block_size(self): + return self.key_size - self.padding_size + + def encrypted_block_size(self): + return self.key_size + + def encrypt(self, data): + encrypted = b'' + block_size = self.plain_block_size() + for i in range(0, len(data), block_size): + encrypted += self.encryptor(self.server_cert.public_key(), data[i[:i + block_size]]) + + return encrypted + + +class DecryptorRsa(Decryptor): + + def __init__(self, client_pk, dec_fn, padding_size): + require_cryptography(self) + self.client_pk = client_pk + self.key_size = self.client_pk.key_size // 8 + self.decryptor = dec_fn + self.padding_size = padding_size + + def plain_block_size(self): + return self.key_size - self.padding_size + + def encrypted_block_size(self): + return self.key_size + + def decrypt(self, data): + decrypted = b'' + block_size = self.encrypted_block_size() + for i in range(0, len(data), block_size): + decrypted += self.decryptor(self.client_pk, data[i[:i + block_size]]) + + return decrypted + + +class SignerAesCbc(Signer): + + def __init__(self, key): + require_cryptography(self) + self.key = key + + def signature_size(self): + return uacrypto.sha1_size() + + def signature(self, data): + return uacrypto.hmac_sha1(self.key, data) + + +class VerifierAesCbc(Verifier): + + def __init__(self, key): + require_cryptography(self) + self.key = key + + def signature_size(self): + return uacrypto.sha1_size() + + def verify(self, data, signature): + expected = uacrypto.hmac_sha1(self.key, data) + if signature != expected: + raise uacrypto.InvalidSignature + + +class EncryptorAesCbc(Encryptor): + + def __init__(self, key, init_vec): + require_cryptography(self) + self.cipher = uacrypto.cipher_aes_cbc(key, init_vec) + + def plain_block_size(self): + return self.cipher.algorithm.key_size 
// 8 + + def encrypted_block_size(self): + return self.cipher.algorithm.key_size // 8 + + def encrypt(self, data): + return uacrypto.cipher_encrypt(self.cipher, data) + + +class DecryptorAesCbc(Decryptor): + + def __init__(self, key, init_vec): + require_cryptography(self) + self.cipher = uacrypto.cipher_aes_cbc(key, init_vec) + + def plain_block_size(self): + return self.cipher.algorithm.key_size // 8 + + def encrypted_block_size(self): + return self.cipher.algorithm.key_size // 8 + + def decrypt(self, data): + return uacrypto.cipher_decrypt(self.cipher, data) + + +class SignerSha256(Signer): + + def __init__(self, client_pk): + require_cryptography(self) + self.client_pk = client_pk + self.key_size = self.client_pk.key_size // 8 + + def signature_size(self): + return self.key_size + + def signature(self, data): + return uacrypto.sign_sha256(self.client_pk, data) + + +class VerifierSha256(Verifier): + + def __init__(self, server_cert): + require_cryptography(self) + self.server_cert = server_cert + self.key_size = self.server_cert.public_key().key_size // 8 + + def signature_size(self): + return self.key_size + + def verify(self, data, signature): + uacrypto.verify_sha256(self.server_cert, data, signature) + + +class SignerHMac256(Signer): + + def __init__(self, key): + require_cryptography(self) + self.key = key + + def signature_size(self): + return uacrypto.sha256_size() + + def signature(self, data): + return uacrypto.hmac_sha256(self.key, data) + + +class VerifierHMac256(Verifier): + + def __init__(self, key): + require_cryptography(self) + self.key = key + + def signature_size(self): + return uacrypto.sha256_size() + + def verify(self, data, signature): + expected = uacrypto.hmac_sha256(self.key, data) + if signature != expected: + raise uacrypto.InvalidSignature + + +class SecurityPolicyBasic128Rsa15(SecurityPolicy): + __doc__ = "\n DEPRECATED, do not use anymore!\n\n Security Basic 128Rsa15\n A suite of algorithms that uses RSA15 as Key-Wrap-algorithm\n 
and 128-Bit (16 bytes) for encryption algorithms.\n - SymmetricSignatureAlgorithm - HmacSha1\n (http://www.w3.org/2000/09/xmldsig#hmac-sha1)\n - SymmetricEncryptionAlgorithm - Aes128\n (http://www.w3.org/2001/04/xmlenc#aes128-cbc)\n - AsymmetricSignatureAlgorithm - RsaSha1\n (http://www.w3.org/2000/09/xmldsig#rsa-sha1)\n - AsymmetricKeyWrapAlgorithm - KwRsa15\n (http://www.w3.org/2001/04/xmlenc#rsa-1_5)\n - AsymmetricEncryptionAlgorithm - Rsa15\n (http://www.w3.org/2001/04/xmlenc#rsa-1_5)\n - KeyDerivationAlgorithm - PSha1\n (http://docs.oasis-open.org/ws-sx/ws-secureconversation/200512/dk/p_sha1)\n - DerivedSignatureKeyLength - 128 (16 bytes)\n - MinAsymmetricKeyLength - 1024 (128 bytes)\n - MaxAsymmetricKeyLength - 2048 (256 bytes)\n - CertificateSignatureAlgorithm - Sha1\n\n If a certificate or any certificate in the chain is not signed with\n a hash that is Sha1 or stronger then the certificate shall be rejected.\n " + URI = "http://opcfoundation.org/UA/SecurityPolicy#Basic128Rsa15" + signature_key_size = 16 + symmetric_key_size = 16 + AsymmetricEncryptionURI = "http://www.w3.org/2001/04/xmlenc#rsa-1_5" + AsymmetricSignatureURI = "http://www.w3.org/2000/09/xmldsig#rsa-sha1" + + @staticmethod + def encrypt_asymmetric(pubkey, data): + return uacrypto.encrypt_rsa15(pubkey, data) + + def __init__(self, server_cert, client_cert, client_pk, mode): + logger = logging.getLogger(__name__) + logger.warning("DEPRECATED! 
Do not use SecurityPolicyBasic128Rsa15 anymore!") + require_cryptography(self) + if isinstance(server_cert, bytes): + server_cert = uacrypto.x509_from_der(server_cert) + self.asymmetric_cryptography = Cryptography(MessageSecurityMode.SignAndEncrypt) + self.asymmetric_cryptography.Signer = SignerRsa(client_pk) + self.asymmetric_cryptography.Verifier = VerifierRsa(server_cert) + self.asymmetric_cryptography.Encryptor = EncryptorRsa(server_cert, uacrypto.encrypt_rsa15, 11) + self.asymmetric_cryptography.Decryptor = DecryptorRsa(client_pk, uacrypto.decrypt_rsa15, 11) + self.symmetric_cryptography = Cryptography(mode) + self.Mode = mode + self.server_certificate = uacrypto.der_from_x509(server_cert) + self.client_certificate = uacrypto.der_from_x509(client_cert) + + def make_local_symmetric_key(self, secret, seed): + key_sizes = ( + self.signature_key_size, self.symmetric_key_size, 16) + sigkey, key, init_vec = uacrypto.p_sha1(secret, seed, key_sizes) + self.symmetric_cryptography.Signer = SignerAesCbc(sigkey) + self.symmetric_cryptography.Encryptor = EncryptorAesCbc(key, init_vec) + + def make_remote_symmetric_key(self, secret, seed): + key_sizes = ( + self.signature_key_size, self.symmetric_key_size, 16) + sigkey, key, init_vec = uacrypto.p_sha1(secret, seed, key_sizes) + self.symmetric_cryptography.Verifier = VerifierAesCbc(sigkey) + self.symmetric_cryptography.Decryptor = DecryptorAesCbc(key, init_vec) + + +class SecurityPolicyBasic256(SecurityPolicy): + __doc__ = "\n DEPRECATED, do not use anymore!\n\n Security Basic 256\n A suite of algorithms that are for 256-Bit (32 bytes) encryption,\n algorithms include:\n - SymmetricSignatureAlgorithm - HmacSha1\n (http://www.w3.org/2000/09/xmldsig#hmac-sha1)\n - SymmetricEncryptionAlgorithm - Aes256\n (http://www.w3.org/2001/04/xmlenc#aes256-cbc)\n - AsymmetricSignatureAlgorithm - RsaSha1\n (http://www.w3.org/2000/09/xmldsig#rsa-sha1)\n - AsymmetricKeyWrapAlgorithm - KwRsaOaep\n 
(http://www.w3.org/2001/04/xmlenc#rsa-oaep-mgf1p)\n - AsymmetricEncryptionAlgorithm - RsaOaep\n (http://www.w3.org/2001/04/xmlenc#rsa-oaep)\n - KeyDerivationAlgorithm - PSha1\n (http://docs.oasis-open.org/ws-sx/ws-secureconversation/200512/dk/p_sha1)\n - DerivedSignatureKeyLength - 192 (24 bytes)\n - MinAsymmetricKeyLength - 1024 (128 bytes)\n - MaxAsymmetricKeyLength - 2048 (256 bytes)\n - CertificateSignatureAlgorithm - Sha1\n\n If a certificate or any certificate in the chain is not signed with\n a hash that is Sha1 or stronger then the certificate shall be rejected.\n " + URI = "http://opcfoundation.org/UA/SecurityPolicy#Basic256" + signature_key_size = 24 + symmetric_key_size = 32 + AsymmetricEncryptionURI = "http://www.w3.org/2001/04/xmlenc#rsa-oaep" + AsymmetricSignatureURI = "http://www.w3.org/2000/09/xmldsig#rsa-sha1" + + @staticmethod + def encrypt_asymmetric(pubkey, data): + return uacrypto.encrypt_rsa_oaep(pubkey, data) + + def __init__(self, server_cert, client_cert, client_pk, mode): + logger = logging.getLogger(__name__) + logger.warning("DEPRECATED! 
Do not use SecurityPolicyBasic256 anymore!") + require_cryptography(self) + if isinstance(server_cert, bytes): + server_cert = uacrypto.x509_from_der(server_cert) + self.asymmetric_cryptography = Cryptography(MessageSecurityMode.SignAndEncrypt) + self.asymmetric_cryptography.Signer = SignerRsa(client_pk) + self.asymmetric_cryptography.Verifier = VerifierRsa(server_cert) + self.asymmetric_cryptography.Encryptor = EncryptorRsa(server_cert, uacrypto.encrypt_rsa_oaep, 42) + self.asymmetric_cryptography.Decryptor = DecryptorRsa(client_pk, uacrypto.decrypt_rsa_oaep, 42) + self.symmetric_cryptography = Cryptography(mode) + self.Mode = mode + self.server_certificate = uacrypto.der_from_x509(server_cert) + self.client_certificate = uacrypto.der_from_x509(client_cert) + + def make_local_symmetric_key(self, secret, seed): + key_sizes = ( + self.signature_key_size, self.symmetric_key_size, 16) + sigkey, key, init_vec = uacrypto.p_sha1(secret, seed, key_sizes) + self.symmetric_cryptography.Signer = SignerAesCbc(sigkey) + self.symmetric_cryptography.Encryptor = EncryptorAesCbc(key, init_vec) + + def make_remote_symmetric_key(self, secret, seed): + key_sizes = ( + self.signature_key_size, self.symmetric_key_size, 16) + sigkey, key, init_vec = uacrypto.p_sha1(secret, seed, key_sizes) + self.symmetric_cryptography.Verifier = VerifierAesCbc(sigkey) + self.symmetric_cryptography.Decryptor = DecryptorAesCbc(key, init_vec) + + +class SecurityPolicyBasic256Sha256(SecurityPolicy): + __doc__ = "\n Security Basic 256Sha256\n A suite of algorithms that uses Sha256 as Key-Wrap-algorithm\n and 256-Bit (32 bytes) for encryption algorithms.\n\n - SymmetricSignatureAlgorithm_HMAC-SHA2-256\n https://tools.ietf.org/html/rfc4634\n - SymmetricEncryptionAlgorithm_AES256-CBC\n http://www.w3.org/2001/04/xmlenc#aes256-cbc\n - AsymmetricSignatureAlgorithm_RSA-PKCS15-SHA2-256\n http://www.w3.org/2001/04/xmldsig-more#rsa-sha256\n - AsymmetricEncryptionAlgorithm_RSA-OAEP-SHA1\n 
http://www.w3.org/2001/04/xmlenc#rsa-oaep\n - KeyDerivationAlgorithm_P-SHA2-256\n http://docs.oasis-open.org/ws-sx/ws-secureconversation/200512/dk/p_sha256\n - CertificateSignatureAlgorithm_RSA-PKCS15-SHA2-256\n http://www.w3.org/2001/04/xmldsig-more#rsa-sha256\n - Basic256Sha256_Limits\n -> DerivedSignatureKeyLength: 256 bits\n -> MinAsymmetricKeyLength: 2048 bits\n -> MaxAsymmetricKeyLength: 4096 bits\n -> SecureChannelNonceLength: 32 bytes\n " + URI = "http://opcfoundation.org/UA/SecurityPolicy#Basic256Sha256" + signature_key_size = 32 + symmetric_key_size = 32 + AsymmetricEncryptionURI = "http://www.w3.org/2001/04/xmlenc#rsa-oaep" + AsymmetricSignatureURI = "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256" + + @staticmethod + def encrypt_asymmetric(pubkey, data): + return uacrypto.encrypt_rsa_oaep(pubkey, data) + + def __init__(self, server_cert, client_cert, client_pk, mode): + require_cryptography(self) + if isinstance(server_cert, bytes): + server_cert = uacrypto.x509_from_der(server_cert) + self.asymmetric_cryptography = Cryptography(MessageSecurityMode.SignAndEncrypt) + self.asymmetric_cryptography.Signer = SignerSha256(client_pk) + self.asymmetric_cryptography.Verifier = VerifierSha256(server_cert) + self.asymmetric_cryptography.Encryptor = EncryptorRsa(server_cert, uacrypto.encrypt_rsa_oaep, 42) + self.asymmetric_cryptography.Decryptor = DecryptorRsa(client_pk, uacrypto.decrypt_rsa_oaep, 42) + self.symmetric_cryptography = Cryptography(mode) + self.Mode = mode + self.server_certificate = uacrypto.der_from_x509(server_cert) + self.client_certificate = uacrypto.der_from_x509(client_cert) + + def make_local_symmetric_key(self, secret, seed): + key_sizes = ( + self.signature_key_size, self.symmetric_key_size, 16) + sigkey, key, init_vec = uacrypto.p_sha256(secret, seed, key_sizes) + self.symmetric_cryptography.Signer = SignerHMac256(sigkey) + self.symmetric_cryptography.Encryptor = EncryptorAesCbc(key, init_vec) + + def make_remote_symmetric_key(self, 
secret, seed): + key_sizes = ( + self.signature_key_size, self.symmetric_key_size, 16) + sigkey, key, init_vec = uacrypto.p_sha256(secret, seed, key_sizes) + self.symmetric_cryptography.Verifier = VerifierHMac256(sigkey) + self.symmetric_cryptography.Decryptor = DecryptorAesCbc(key, init_vec) + + +def encrypt_asymmetric(pubkey, data, policy_uri): + """ + Encrypt data with pubkey using an asymmetric algorithm. + The algorithm is selected by policy_uri. + Returns a tuple (encrypted_data, algorithm_uri) + """ + for cls in [SecurityPolicyBasic256Sha256, SecurityPolicyBasic256, SecurityPolicyBasic128Rsa15]: + if policy_uri == cls.URI: + return ( + cls.encrypt_asymmetric(pubkey, data), + cls.AsymmetricEncryptionURI) + + if not policy_uri or policy_uri == POLICY_NONE_URI: + return ( + data, "") + raise UaError("Unsupported security policy `{0}`".format(policy_uri)) diff --git a/APPS_UNCOMPILED/lib/opcua/crypto/uacrypto.py b/APPS_UNCOMPILED/lib/opcua/crypto/uacrypto.py new file mode 100644 index 0000000..e5030c2 --- /dev/null +++ b/APPS_UNCOMPILED/lib/opcua/crypto/uacrypto.py @@ -0,0 +1,197 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/opcua/crypto/uacrypto.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 5871 bytes +import os +from cryptography import x509 +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives import hmac +from cryptography.hazmat.primitives.asymmetric import padding +from cryptography.hazmat.primitives.ciphers import Cipher +from cryptography.hazmat.primitives.ciphers import algorithms +from cryptography.hazmat.primitives.ciphers import modes + 
+def load_certificate(path): + _, ext = os.path.splitext(path) + with open(path, "rb") as f: + if ext == ".pem": + return x509.load_pem_x509_certificate(f.read(), default_backend()) + return x509.load_der_x509_certificate(f.read(), default_backend()) + + +def x509_from_der(data): + if not data: + return + return x509.load_der_x509_certificate(data, default_backend()) + + +def load_private_key(path): + _, ext = os.path.splitext(path) + with open(path, "rb") as f: + if ext == ".pem": + return serialization.load_pem_private_key((f.read()), password=None, backend=(default_backend())) + return serialization.load_der_private_key((f.read()), password=None, backend=(default_backend())) + + +def der_from_x509(certificate): + if certificate is None: + return b'' + return certificate.public_bytes(serialization.Encoding.DER) + + +def sign_sha1(private_key, data): + return private_key.sign(data, padding.PKCS1v15(), hashes.SHA1()) + + +def sign_sha256(private_key, data): + return private_key.sign(data, padding.PKCS1v15(), hashes.SHA256()) + + +def verify_sha1(certificate, data, signature): + certificate.public_key().verify(signature, data, padding.PKCS1v15(), hashes.SHA1()) + + +def verify_sha256(certificate, data, signature): + certificate.public_key().verify(signature, data, padding.PKCS1v15(), hashes.SHA256()) + + +def encrypt_basic256(public_key, data): + ciphertext = public_key.encrypt(data, padding.OAEP(mgf=padding.MGF1(algorithm=(hashes.SHA256())), + algorithm=(hashes.SHA256()), + label=None)) + return ciphertext + + +def encrypt_rsa_oaep(public_key, data): + ciphertext = public_key.encrypt(data, padding.OAEP(mgf=padding.MGF1(algorithm=(hashes.SHA1())), + algorithm=(hashes.SHA1()), + label=None)) + return ciphertext + + +def encrypt_rsa15(public_key, data): + ciphertext = public_key.encrypt(data, padding.PKCS1v15()) + return ciphertext + + +def decrypt_rsa_oaep(private_key, data): + text = private_key.decrypt(data, padding.OAEP(mgf=padding.MGF1(algorithm=(hashes.SHA1())), 
+ algorithm=(hashes.SHA1()), + label=None)) + return text + + + def decrypt_rsa15(private_key, data): + text = private_key.decrypt(data, padding.PKCS1v15()) + return text + + + def cipher_aes_cbc(key, init_vec): + return Cipher(algorithms.AES(key), modes.CBC(init_vec), default_backend()) + + + def cipher_encrypt(cipher, data): + encryptor = cipher.encryptor() + return encryptor.update(data) + encryptor.finalize() + + + def cipher_decrypt(cipher, data): + decryptor = cipher.decryptor() + return decryptor.update(data) + decryptor.finalize() + + + def hmac_sha1(key, message): + hasher = hmac.HMAC(key, (hashes.SHA1()), backend=(default_backend())) + hasher.update(message) + return hasher.finalize() + + + def hmac_sha256(key, message): + hasher = hmac.HMAC(key, (hashes.SHA256()), backend=(default_backend())) + hasher.update(message) + return hasher.finalize() + + + def sha1_size(): + return hashes.SHA1.digest_size + + + def sha256_size(): + return hashes.SHA256.digest_size + + + def p_sha1(secret, seed, sizes=()): + """ + Derive one or more keys from secret and seed. + (See specs part 6, 6.7.5 and RFC 2246 - TLS v1.0) + Lengths of keys will match sizes argument + """ + full_size = 0 + for size in sizes: + full_size += size + + result = b'' + accum = seed + while len(result) < full_size: + accum = hmac_sha1(secret, accum) + result += hmac_sha1(secret, accum + seed) + + parts = [] + for size in sizes: + parts.append(result[:size]) + result = result[size:] + + return tuple(parts) + + + def p_sha256(secret, seed, sizes=()): + """ + Derive one or more keys from secret and seed. 
+ (See specs part 6, 6.7.5 and RFC 2246 - TLS v1.0) + Lengths of keys will match sizes argument + """ + full_size = 0 + for size in sizes: + full_size += size + + result = b'' + accum = seed + while len(result) < full_size: + accum = hmac_sha256(secret, accum) + result += hmac_sha256(secret, accum + seed) + + parts = [] + for size in sizes: + parts.append(result[:size]) + result = result[size:] + + return tuple(parts) + + + def x509_name_to_string(name): + parts = ["{0}={1}".format(attr.oid._name, attr.value) for attr in name] + return ", ".join(parts) + + + def x509_to_string(cert): + """ + Convert x509 certificate to human-readable string + """ + if cert.subject == cert.issuer: + issuer = " (self-signed)" + else: + issuer = ", issuer: {0}".format(x509_name_to_string(cert.issuer)) + return "{0}{1}, {2} - {3}".format(x509_name_to_string(cert.subject), issuer, cert.not_valid_before, cert.not_valid_after) + + + if __name__ == "__main__": + cert = load_certificate("../examples/server_cert.pem") + rsa_privkey = load_private_key("../examples/mykey.pem") + from IPython import embed + embed() diff --git a/APPS_UNCOMPILED/lib/opcua/tools.py b/APPS_UNCOMPILED/lib/opcua/tools.py new file mode 100644 index 0000000..c24e86f --- /dev/null +++ b/APPS_UNCOMPILED/lib/opcua/tools.py @@ -0,0 +1,699 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/opcua/tools.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 30498 bytes +import logging, sys, argparse +from datetime import datetime, timedelta +import math, time +try: + from IPython import embed +except ImportError: + import code + + def embed(): + code.interact(local=dict((globals()), **locals())) + + +from opcua import ua +from opcua import Client +from opcua import Server +from opcua import Node +from opcua import 
uamethod +from opcua.ua.uaerrors import UaStatusCodeError + +def add_minimum_args(parser): + parser.add_argument("-u", "--url", + help="URL of OPC UA server (for example: opc.tcp://example.org:4840)", + default="opc.tcp://localhost:4840", + metavar="URL") + parser.add_argument("-v", "--verbose", + dest="loglevel", + choices=[ + 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], + default="WARNING", + help="Set log level") + parser.add_argument("--timeout", dest="timeout", + type=int, + default=1, + help="Set socket timeout (NOT the diverse UA timeouts)") + + +def add_common_args(parser, default_node='i=84', require_node=False): + add_minimum_args(parser) + parser.add_argument("-n", "--nodeid", + help="Fully-qualified node ID (for example: i=85). Default: root node", + default=default_node, + required=require_node, + metavar="NODE") + parser.add_argument("-p", "--path", + help="Comma separated browse path to the node starting at NODE (for example: 3:Mybject,3:MyVariable)", + default="", + metavar="BROWSEPATH") + parser.add_argument("-i", "--namespace", + help="Default namespace", + type=int, + default=0, + metavar="NAMESPACE") + parser.add_argument("--security", help="Security settings, for example: Basic256Sha256,SignAndEncrypt,cert.der,pk.pem[,server_cert.der]. Default: None", + default="") + parser.add_argument("--user", help="User name for authentication. Overrides the user name given in the URL.") + parser.add_argument("--password", help="Password name for authentication. 
Overrides the password given in the URL.") + + + def _require_nodeid(parser, args): + if args.nodeid == "i=84": + if args.path == "": + parser.print_usage() + print("{0}: error: A NodeId or BrowsePath is required".format(parser.prog)) + sys.exit(1) + + + def parse_args(parser, requirenodeid=False): + args = parser.parse_args() + logging.basicConfig(format="%(levelname)s: %(message)s", level=(getattr(logging, args.loglevel))) + if args.url: + if "://" not in args.url: + logging.info("Adding default scheme %s to URL %s", ua.OPC_TCP_SCHEME, args.url) + args.url = ua.OPC_TCP_SCHEME + "://" + args.url + if requirenodeid: + _require_nodeid(parser, args) + return args + + + def get_node(client, args): + node = client.get_node(args.nodeid) + if args.path: + path = args.path.split(",") + if node.nodeid == ua.NodeId(84, 0): + if path[0] == "0:Root": + path = path[1:] + node = node.get_child(path) + return node + + + def uaread(): + parser = argparse.ArgumentParser(description="Read attribute of a node, per default reads value of a node") + add_common_args(parser) + parser.add_argument("-a", "--attribute", + dest="attribute", + type=int, + default=(ua.AttributeIds.Value), + help="Set attribute to read") + parser.add_argument("-t", "--datatype", + dest="datatype", + default="python", + choices=[ + "python", "variant", "datavalue"], + help="Data type to return") + args = parse_args(parser, requirenodeid=True) + client = Client((args.url), timeout=(args.timeout)) + client.set_security_string(args.security) + client.connect() + try: + node = get_node(client, args) + attr = node.get_attribute(args.attribute) + if args.datatype == "python": + print(attr.Value.Value) + else: + if args.datatype == "variant": + print(attr.Value) + else: + print(attr) + finally: + client.disconnect() + + sys.exit(0) + print(args) + + + def _args_to_array(val, array): + if array == "guess": + if "," in val: + array = "true" + if array == "true": + val = val.split(",") + return val + + + def 
_arg_to_bool(val): + return val in ('true', 'True') + + + def _arg_to_variant(val, array, ptype, varianttype=None): + val = _args_to_array(val, array) + if isinstance(val, list): + val = [ptype(i) for i in val] + else: + val = ptype(val) + if varianttype: + return ua.Variant(val, varianttype) + return ua.Variant(val) + + + def _val_to_variant(val, args): + # NOTE(review): uncompyle6 failed here ("Parse error at or near `COME_FROM' instruction at offset 764"); + # body reconstructed from upstream FreeOpcUa python-opcua opcua/tools.py -- verify against deployed bytecode. + array = args.array + if args.datatype == "guess": + if val in ("true", "True", "false", "False"): + return _arg_to_variant(val, array, _arg_to_bool) + try: + return _arg_to_variant(val, array, int) + except ValueError: + try: + return _arg_to_variant(val, array, float) + except ValueError: + return _arg_to_variant(val, array, str) + elif args.datatype == "bool": + return _arg_to_variant(val, array, _arg_to_bool) + elif args.datatype == "sbyte": + return _arg_to_variant(val, array, int, ua.VariantType.SByte) + elif args.datatype == "byte": + return _arg_to_variant(val, array, int, ua.VariantType.Byte) + elif args.datatype == "uint16": + return _arg_to_variant(val, array, int, ua.VariantType.UInt16) + elif args.datatype == "uint32": + return _arg_to_variant(val, array, int, ua.VariantType.UInt32) + elif args.datatype == "uint64": + return _arg_to_variant(val, array, int, ua.VariantType.UInt64) + elif args.datatype == "int16": + return _arg_to_variant(val, array, int, ua.VariantType.Int16) + elif args.datatype == "int32": + return _arg_to_variant(val, array, int, ua.VariantType.Int32) + elif args.datatype == "int64": + return _arg_to_variant(val, array, int, ua.VariantType.Int64) + elif args.datatype == "float": + return _arg_to_variant(val, array, float, ua.VariantType.Float) + elif args.datatype == "double": + return _arg_to_variant(val, array, float, ua.VariantType.Double) + elif args.datatype == "string": + return _arg_to_variant(val, array, str) + elif args.datatype == "datetime": + raise NotImplementedError + elif args.datatype == "nodeid": + return _arg_to_variant(val, array, ua.NodeId.from_string, ua.VariantType.NodeId) + elif args.datatype == "expandednodeid": + return _arg_to_variant(val, array, ua.ExpandedNodeId.from_string, ua.VariantType.ExpandedNodeId) + elif args.datatype == "statuscode": + return _arg_to_variant(val, array, int, ua.VariantType.StatusCode) + elif args.datatype in ("qualifiedname", "browsename"): + return _arg_to_variant(val, array, ua.QualifiedName.from_string, ua.VariantType.QualifiedName) + elif args.datatype == "localizedtext": + return _arg_to_variant(val, array, ua.LocalizedText, ua.VariantType.LocalizedText) + + + def _configure_client_with_args(client, args): + if args.user: + client.set_user(args.user) + if args.password: + client.set_password(args.password) + client.set_security_string(args.security) + + + def uawrite(): + parser = argparse.ArgumentParser(description="Write attribute of a node, per default write value of node") + add_common_args(parser) + parser.add_argument("-a", "--attribute", + dest="attribute", + type=int, + default=(ua.AttributeIds.Value), + help="Set attribute to read") + parser.add_argument("-l", "--list", + "--array", + dest="array", + default="guess", + choices=[ + "guess", "true", "false"], + help="Value is an array") + parser.add_argument("-t", "--datatype", + dest="datatype", + default="guess", + choices=[ + 'guess', 'byte', 'sbyte', 'nodeid', 'expandednodeid', 'qualifiedname', + 'browsename', 'string', 'float', 'double', 'int16', 'int32', 'int64', + 'uint16', 'uint32', 'uint64', 'bool', 'string', 'datetime', 'bytestring', + 'xmlelement', 'statuscode', 'localizedtext'], + help="Data type to return") + parser.add_argument("value", help="Value to be written", + metavar="VALUE") + args = parse_args(parser, requirenodeid=True) + client = Client((args.url), timeout=(args.timeout)) + _configure_client_with_args(client, args) + client.connect() + try: + node = get_node(client, args) + val = _val_to_variant(args.value, args) + node.set_attribute(args.attribute, ua.DataValue(val)) + finally: + client.disconnect() + + sys.exit(0) + print(args) + + + def uals(): + parser = argparse.ArgumentParser(description="Browse OPC-UA node and print result") + 
add_common_args(parser) + parser.add_argument("-l", dest="long_format", + const=3, + nargs="?", + type=int, + help="use a long listing format") + parser.add_argument("-d", "--depth", + default=1, + type=int, + help="Browse depth") + args = parse_args(parser) + if args.long_format is None: + args.long_format = 1 + client = Client((args.url), timeout=(args.timeout)) + _configure_client_with_args(client, args) + client.connect() + try: + node = get_node(client, args) + print("Browsing node {0} at {1}\n".format(node, args.url)) + if args.long_format == 0: + _lsprint_0(node, args.depth - 1) + else: + if args.long_format == 1: + _lsprint_1(node, args.depth - 1) + else: + _lsprint_long(node, args.depth - 1) + finally: + client.disconnect() + + sys.exit(0) + print(args) + + +def _lsprint_0(node, depth, indent=''): + if not indent: + print("{0:30} {1:25}".format("DisplayName", "NodeId")) + print("") + for desc in node.get_children_descriptions(): + print("{0}{1:30} {2:25}".format(indent, desc.DisplayName.to_string(), desc.NodeId.to_string())) + if depth: + _lsprint_0(Node(node.server, desc.NodeId), depth - 1, indent + " ") + + +def _lsprint_1(node, depth, indent=''): + if not indent: + print("{0:30} {1:25} {2:25} {3:25}".format("DisplayName", "NodeId", "BrowseName", "Value")) + print("") + for desc in node.get_children_descriptions(): + if desc.NodeClass == ua.NodeClass.Variable: + try: + val = Node(node.server, desc.NodeId).get_value() + except UaStatusCodeError as err: + try: + val = "Bad (0x{0:x})".format(err.code) + finally: + err = None + del err + + print("{0}{1:30} {2!s:25} {3!s:25}, {4!s:3}".format(indent, desc.DisplayName.to_string(), desc.NodeId.to_string(), desc.BrowseName.to_string(), val)) + else: + print("{0}{1:30} {2!s:25} {3!s:25}".format(indent, desc.DisplayName.to_string(), desc.NodeId.to_string(), desc.BrowseName.to_string())) + if depth: + _lsprint_1(Node(node.server, desc.NodeId), depth - 1, indent + " ") + + +def _lsprint_long(pnode, depth, indent=''): 
+ if not indent: + print("{0:30} {1:25} {2:25} {3:10} {4:30} {5:25}".format("DisplayName", "NodeId", "BrowseName", "DataType", "Timestamp", "Value")) + print("") + for node in pnode.get_children(): + attrs = node.get_attributes([ua.AttributeIds.DisplayName, + ua.AttributeIds.BrowseName, + ua.AttributeIds.NodeClass, + ua.AttributeIds.WriteMask, + ua.AttributeIds.UserWriteMask, + ua.AttributeIds.DataType, + ua.AttributeIds.Value]) + name, bname, nclass, mask, umask, dtype, val = [attr.Value.Value for attr in attrs] + update = attrs[-1].ServerTimestamp + if nclass == ua.NodeClass.Variable: + print("{0}{1:30} {2:25} {3:25} {4:10} {5!s:30} {6!s:25}".format(indent, name.to_string(), node.nodeid.to_string(), bname.to_string(), dtype.to_string(), update, val)) + else: + print("{0}{1:30} {2:25} {3:25}".format(indent, name.to_string(), bname.to_string(), node.nodeid.to_string())) + if depth: + _lsprint_long(node, depth - 1, indent + " ") + + +class SubHandler(object): + + def datachange_notification(self, node, val, data): + print("New data change event", node, val, data) + + def event_notification(self, event): + print("New event", event) + + +def uasubscribe(): + parser = argparse.ArgumentParser(description="Subscribe to a node and print results") + add_common_args(parser) + parser.add_argument("-t", "--eventtype", + dest="eventtype", + default="datachange", + choices=[ + "datachange", "event"], + help="Event type to subscribe to") + args = parse_args(parser, requirenodeid=False) + if args.eventtype == "datachange": + _require_nodeid(parser, args) + else: + if args.nodeid == "i=84": + if args.path == "": + args.nodeid = "i=2253" + client = Client((args.url), timeout=(args.timeout)) + _configure_client_with_args(client, args) + client.connect() + try: + node = get_node(client, args) + handler = SubHandler() + sub = client.create_subscription(500, handler) + if args.eventtype == "datachange": + sub.subscribe_data_change(node) + else: + sub.subscribe_events(node) + 
print("Type Ctr-C to exit") + while True: + time.sleep(1) + + finally: + client.disconnect() + + sys.exit(0) + print(args) + + +def application_to_strings(app): + result = [] + result.append(("Application URI", app.ApplicationUri)) + optionals = [ + ( + "Product URI", app.ProductUri), + ( + "Application Name", app.ApplicationName.to_string()), + ( + "Application Type", str(app.ApplicationType)), + ( + "Gateway Server URI", app.GatewayServerUri), + ( + "Discovery Profile URI", app.DiscoveryProfileUri)] + for n, v in optionals: + if v: + result.append((n, v)) + + for url in app.DiscoveryUrls: + result.append(("Discovery URL", url)) + + return result + + +def cert_to_string(der): + if not der: + return "[no certificate]" + try: + from opcua.crypto import uacrypto + except ImportError: + return "{0} bytes".format(len(der)) + else: + cert = uacrypto.x509_from_der(der) + return uacrypto.x509_to_string(cert) + + +def endpoint_to_strings(ep): + result = [ + ( + "Endpoint URL", ep.EndpointUrl)] + result += application_to_strings(ep.Server) + result += [ + ( + "Server Certificate", cert_to_string(ep.ServerCertificate)), + ( + "Security Mode", str(ep.SecurityMode)), + ( + "Security Policy URI", ep.SecurityPolicyUri)] + for tok in ep.UserIdentityTokens: + result += [ + ( + "User policy", tok.PolicyId), + ( + " Token type", str(tok.TokenType))] + if not tok.IssuedTokenType: + if tok.IssuerEndpointUrl: + result += [ + ( + " Issued Token type", tok.IssuedTokenType), + ( + " Issuer Endpoint URL", tok.IssuerEndpointUrl)] + if tok.SecurityPolicyUri: + result.append((" Security Policy URI", tok.SecurityPolicyUri)) + + result += [ + ( + "Transport Profile URI", ep.TransportProfileUri), + ( + "Security Level", ep.SecurityLevel)] + return result + + +def uaclient(): + parser = argparse.ArgumentParser(description="Connect to server and start python shell. root and objects nodes are available. 
Node specificed in command line is available as mynode variable") + add_common_args(parser) + parser.add_argument("-c", "--certificate", + help="set client certificate") + parser.add_argument("-k", "--private_key", + help="set client private key") + args = parse_args(parser) + client = Client((args.url), timeout=(args.timeout)) + _configure_client_with_args(client, args) + if args.certificate: + client.load_client_certificate(args.certificate) + if args.private_key: + client.load_private_key(args.private_key) + client.connect() + try: + root = client.get_root_node() + objects = client.get_objects_node() + mynode = get_node(client, args) + embed() + finally: + client.disconnect() + + sys.exit(0) + + +def uaserver(): + parser = argparse.ArgumentParser(description="Run an example OPC-UA server. By importing xml definition and using uawrite command line, it is even possible to expose real data using this server") + parser.add_argument("-u", "--url", + help="URL of OPC UA server, default is opc.tcp://0.0.0.0:4840", + default="opc.tcp://0.0.0.0:4840", + metavar="URL") + parser.add_argument("-v", "--verbose", + dest="loglevel", + choices=[ + 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], + default="WARNING", + help="Set log level") + parser.add_argument("-x", "--xml", + metavar="XML_FILE", + help="Populate address space with nodes defined in XML") + parser.add_argument("-p", "--populate", + action="store_true", + help="Populate address space with some sample nodes") + parser.add_argument("-c", "--disable-clock", + action="store_true", + help="Disable clock, to avoid seeing many write if debugging an application") + parser.add_argument("-s", "--shell", + action="store_true", + help="Start python shell instead of randomly changing node values") + parser.add_argument("--certificate", help="set server certificate") + parser.add_argument("--private_key", help="set server private key") + args = parser.parse_args() + logging.basicConfig(format="%(levelname)s: %(message)s", 
level=(getattr(logging, args.loglevel))) + server = Server() + server.set_endpoint(args.url) + if args.certificate: + server.load_certificate(args.certificate) + if args.private_key: + server.load_private_key(args.private_key) + server.disable_clock(args.disable_clock) + server.set_server_name("FreeOpcUa Example Server") + if args.xml: + server.import_xml(args.xml) + if args.populate: + + @uamethod + def multiply(parent, x, y): + print("multiply method call with parameters: ", x, y) + return x * y + + uri = "http://examples.freeopcua.github.io" + idx = server.register_namespace(uri) + objects = server.get_objects_node() + myobj = objects.add_object(idx, "MyObject") + mywritablevar = myobj.add_variable(idx, "MyWritableVariable", 6.7) + mywritablevar.set_writable() + myvar = myobj.add_variable(idx, "MyVariable", 6.7) + myarrayvar = myobj.add_variable(idx, "MyVarArray", [6.7, 7.9]) + myprop = myobj.add_property(idx, "MyProperty", "I am a property") + mymethod = myobj.add_method(idx, "MyMethod", multiply, [ua.VariantType.Double, ua.VariantType.Int64], [ua.VariantType.Double]) + server.start() + try: + if args.shell: + embed() + else: + if args.populate: + count = 0 + while True: + time.sleep(1) + myvar.set_value(math.sin(count / 10)) + myarrayvar.set_value([math.sin(count / 10), math.sin(count / 100)]) + count += 1 + + else: + while True: + time.sleep(1) + + finally: + server.stop() + + sys.exit(0) + + +def uadiscover(): + parser = argparse.ArgumentParser(description="Performs OPC UA discovery and prints information on servers and endpoints.") + add_minimum_args(parser) + parser.add_argument("-n", "--network", + action="store_true", + help="Also send a FindServersOnNetwork request to server") + args = parse_args(parser) + client = Client((args.url), timeout=(args.timeout)) + if args.network: + print("Performing discovery at {0}\n".format(args.url)) + for i, server in enumerate((client.connect_and_find_servers_on_network()), start=1): + print("Server {0}:".format(i)) + 
print("") + + print("Performing discovery at {0}\n".format(args.url)) + for i, server in enumerate((client.connect_and_find_servers()), start=1): + print("Server {0}:".format(i)) + for n, v in application_to_strings(server): + print(" {0}: {1}".format(n, v)) + + print("") + + for i, ep in enumerate((client.connect_and_get_server_endpoints()), start=1): + print("Endpoint {0}:".format(i)) + for n, v in endpoint_to_strings(ep): + print(" {0}: {1}".format(n, v)) + + print("") + + sys.exit(0) + + +def print_history(o): + print("{0:30} {1:10} {2}".format("Source timestamp", "Status", "Value")) + for d in o: + print("{0:30} {1:10} {2}".format(str(d.SourceTimestamp), d.StatusCode.name, d.Value.Value)) + + +def str_to_datetime(s, default=None): + if not s: + if default is not None: + return default + return datetime.utcnow() + for fmt in ('%Y-%m-%d', '%Y-%m-%d %H:%M', '%Y-%m-%d %H:%M:%S'): + try: + return datetime.strptime(s, fmt) + except ValueError: + pass + + +def uahistoryread(): + parser = argparse.ArgumentParser(description="Read history of a node") + add_common_args(parser) + parser.add_argument("--starttime", default=None, + help="Start time, formatted as YYYY-MM-DD [HH:MM[:SS]]. Default: current time - one day") + parser.add_argument("--endtime", default=None, + help="End time, formatted as YYYY-MM-DD [HH:MM[:SS]]. 
Default: current time") + parser.add_argument("-e", "--events", + action="store_true", + help="Read event history instead of data change history") + parser.add_argument("-l", "--limit", + type=int, + default=10, + help="Maximum number of notfication to return") + args = parse_args(parser, requirenodeid=True) + client = Client((args.url), timeout=(args.timeout)) + _configure_client_with_args(client, args) + client.connect() + try: + node = get_node(client, args) + starttime = str_to_datetime(args.starttime, datetime.utcnow() - timedelta(days=1)) + endtime = str_to_datetime(args.endtime, datetime.utcnow()) + print("Reading raw history of node {0} at {1}; start at {2}, end at {3}\n".format(node, args.url, starttime, endtime)) + if args.events: + evs = node.read_event_history(starttime, endtime, numvalues=(args.limit)) + for ev in evs: + print(ev) + + else: + print_history(node.read_raw_history(starttime, endtime, numvalues=(args.limit))) + finally: + client.disconnect() + + sys.exit(0) + + +def uacall(): + parser = argparse.ArgumentParser(description="Call method of a node") + add_common_args(parser) + parser.add_argument("-m", "--method", + dest="method", + type=int, + default=None, + help="Set method to call. If not given then (single) method of the selected node is used.") + parser.add_argument("-M", "--method-name", + dest="method_name", + type=str, + default=None, + help="Set name of method to call. 
Overrides --method") + parser.add_argument("-l", "--list", + "--array", + dest="array", + default="guess", + choices=[ + "guess", "true", "false"], + help="Value is an array") + parser.add_argument("-t", "--datatype", + dest="datatype", + default="guess", + choices=[ + 'guess', 'byte', 'sbyte', 'nodeid', 'expandednodeid', 'qualifiedname', + 'browsename', 'string', 'float', 'double', 'int16', 'int32', 'int64', + 'uint16', 'uint32', 'uint64', 'bool', 'string', 'datetime', 'bytestring', + 'xmlelement', 'statuscode', 'localizedtext'], + help="Data type to return") + parser.add_argument("value", help="Value to use for call to method, if any", + nargs="?", + metavar="VALUE") + args = parse_args(parser, requirenodeid=True) + client = Client((args.url), timeout=(args.timeout)) + _configure_client_with_args(client, args) + client.connect() + try: + node = get_node(client, args) + if args.value is None: + val = () + else: + val = ( + _val_to_variant(args.value, args),) + methods = node.get_methods() + method_id = None + if args.method_name is not None: + method_id = args.method_name + else: + if args.method is None: + if len(methods) == 0: + raise ValueError("No methods in selected node and no method given") + else: + if len(methods) == 1: + method_id = methods[0] + else: + raise ValueError(("Selected node has {0:d} methods but no method given. 
Provide one of {1!s}".format)(*methods)) + else: + for m in methods: + if m.nodeid.Identifier == args.method: + method_id = m.nodeid + break + + if method_id is None: + method_id = ua.NodeId(identifier=(args.method)) + result_variants = (node.call_method)(method_id, *val) + print("resulting result_variants={0!s}".format(result_variants)) + finally: + client.disconnect() + + sys.exit(0) + print(args) + + +def uageneratestructs(): + parser = argparse.ArgumentParser(description="Generate a Python module from the xml structure definition (.bsd)") + add_common_args(parser, require_node=True) + parser.add_argument("-o", "--output", + dest="output_path", + required=True, + type=str, + default=None, + help="The python file to be generated.") + args = parse_args(parser, requirenodeid=True) + client = Client((args.url), timeout=(args.timeout)) + _configure_client_with_args(client, args) + client.connect() + try: + node = get_node(client, args) + generators, _ = client.load_type_definitions([node]) + generators[0].save_to_file(args.output_path, True) + finally: + client.disconnect() + + sys.exit(0) \ No newline at end of file diff --git a/APPS_UNCOMPILED/lib/opcua/ua/attribute_ids.py b/APPS_UNCOMPILED/lib/opcua/ua/attribute_ids.py new file mode 100644 index 0000000..9fb7025 --- /dev/null +++ b/APPS_UNCOMPILED/lib/opcua/ua/attribute_ids.py @@ -0,0 +1,37 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/opcua/ua/attribute_ids.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 666 bytes +from enum import IntEnum + +class AttributeIds(IntEnum): + NodeId = 1 + NodeClass = 2 + BrowseName = 3 + DisplayName = 4 + Description = 5 + WriteMask = 6 + UserWriteMask = 7 + IsAbstract = 8 + Symmetric = 9 + InverseName = 10 + ContainsNoLoops = 11 + EventNotifier = 12 + Value = 13 + 
DataType = 14 + ValueRank = 15 + ArrayDimensions = 16 + AccessLevel = 17 + UserAccessLevel = 18 + MinimumSamplingInterval = 19 + Historizing = 20 + Executable = 21 + UserExecutable = 22 + DataTypeDefinition = 23 + RolePermissions = 24 + UserRolePermissions = 25 + AccessRestrictions = 26 + AccessLevelEx = 27 diff --git a/APPS_UNCOMPILED/lib/opcua/ua/object_ids.py b/APPS_UNCOMPILED/lib/opcua/ua/object_ids.py new file mode 100644 index 0000000..523c9be --- /dev/null +++ b/APPS_UNCOMPILED/lib/opcua/ua/object_ids.py @@ -0,0 +1,7 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/opcua/ua/object_ids.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 1451711 bytes diff --git a/APPS_UNCOMPILED/lib/opcua/ua/uaprotocol_hand.py b/APPS_UNCOMPILED/lib/opcua/ua/uaprotocol_hand.py new file mode 100644 index 0000000..3b41449 --- /dev/null +++ b/APPS_UNCOMPILED/lib/opcua/ua/uaprotocol_hand.py @@ -0,0 +1,353 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/opcua/ua/uaprotocol_hand.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 10616 bytes +import struct +from opcua.ua import uaprotocol_auto as auto +from opcua.ua import uatypes +from opcua.common import utils +from opcua.ua.uatypes import AccessLevel, FrozenClass +OPC_TCP_SCHEME = "opc.tcp" + +class Hello(uatypes.FrozenClass): + ua_types = (('ProtocolVersion', 'UInt32'), ('ReceiveBufferSize', 'UInt32'), ('SendBufferSize', 'UInt32'), + ('MaxMessageSize', 'UInt32'), ('MaxChunkCount', 'UInt32'), ('EndpointUrl', 'String')) + + def __init__(self): + self.ProtocolVersion = 0 + self.ReceiveBufferSize = 65536 + 
self.SendBufferSize = 65536 + self.MaxMessageSize = 0 + self.MaxChunkCount = 0 + self.EndpointUrl = "" + self._freeze = True + + +class MessageType(object): + Invalid = b'INV' + Hello = b'HEL' + Acknowledge = b'ACK' + Error = b'ERR' + SecureOpen = b'OPN' + SecureClose = b'CLO' + SecureMessage = b'MSG' + + +class ChunkType(object): + Invalid = b'0' + Single = b'F' + Intermediate = b'C' + Abort = b'A' + + +class Header(uatypes.FrozenClass): + + def __init__(self, msgType=None, chunkType=None, channelid=0): + self.MessageType = msgType + self.ChunkType = chunkType + self.ChannelId = channelid + self.body_size = 0 + self.packet_size = 0 + self._freeze = True + + def add_size(self, size): + self.body_size += size + + @staticmethod + def max_size(): + return struct.calcsize("<3scII") + + def __str__(self): + return "Header(type:{0}, chunk_type:{1}, body_size:{2}, channel:{3})".format(self.MessageType, self.ChunkType, self.body_size, self.ChannelId) + + __repr__ = __str__ + + +class ErrorMessage(uatypes.FrozenClass): + ua_types = (('Error', 'StatusCode'), ('Reason', 'String')) + + def __init__(self): + self.Error = uatypes.StatusCode() + self.Reason = "" + self._freeze = True + + def __str__(self): + return "MessageAbort(error:{0}, reason:{1})".format(self.Error, self.Reason) + + __repr__ = __str__ + + +class Acknowledge(uatypes.FrozenClass): + ua_types = [ + ('ProtocolVersion', 'UInt32'), + ('ReceiveBufferSize', 'UInt32'), + ('SendBufferSize', 'UInt32'), + ('MaxMessageSize', 'UInt32'), + ('MaxChunkCount', 'UInt32')] + + def __init__(self): + self.ProtocolVersion = 0 + self.ReceiveBufferSize = 65536 + self.SendBufferSize = 65536 + self.MaxMessageSize = 0 + self.MaxChunkCount = 0 + self._freeze = True + + +class AsymmetricAlgorithmHeader(uatypes.FrozenClass): + ua_types = [ + ('SecurityPolicyURI', 'String'), + ('SenderCertificate', 'ByteString'), + ('ReceiverCertificateThumbPrint', 'ByteString')] + + def __init__(self): + self.SecurityPolicyURI = 
"http://opcfoundation.org/UA/SecurityPolicy#None" + self.SenderCertificate = None + self.ReceiverCertificateThumbPrint = None + self._freeze = True + + def __str__(self): + size1 = len(self.SenderCertificate) if self.SenderCertificate is not None else None + size2 = len(self.ReceiverCertificateThumbPrint) if self.ReceiverCertificateThumbPrint is not None else None + return "{0}(SecurityPolicy:{1}, certificatesize:{2}, receiverCertificatesize:{3} )".format(self.__class__.__name__, self.SecurityPolicyURI, size1, size2) + + __repr__ = __str__ + + +class SymmetricAlgorithmHeader(uatypes.FrozenClass): + ua_types = [ + ('TokenId', 'UInt32')] + + def __init__(self): + self.TokenId = 0 + self._freeze = True + + @staticmethod + def max_size(): + return struct.calcsize("".format(self.__class__.__name__, str(self)) + + def serialize(self): + raise NotImplementedError + + +class Variable(Node): + + def serialize(self): + return str(self) + + +class Value(Node): + + def serialize(self): + return '"{0}"'.format(self) + + +class Op(Node): + + def serialize(self): + return str(self) + + +VARIABLE = L("implementation_version") | L("platform_python_implementation") | L("implementation_name") | L("python_full_version") | L("platform_release") | L("platform_version") | L("platform_machine") | L("platform_system") | L("python_version") | L("sys_platform") | L("os_name") | L("os.name") | L("sys.platform") | L("platform.version") | L("platform.machine") | L("platform.python_implementation") | L("python_implementation") | L("extra") +ALIASES = { + 'os.name': '"os_name"', + 'sys.platform': '"sys_platform"', + 'platform.version': '"platform_version"', + 'platform.machine': '"platform_machine"', + 'platform.python_implementation': '"platform_python_implementation"', + 'python_implementation': '"platform_python_implementation"'} +VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) +VERSION_CMP = L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | 
L("<") +MARKER_OP = VERSION_CMP | L("not in") | L("in") +MARKER_OP.setParseAction(lambda s, l, t: Op(t[0])) +MARKER_VALUE = QuotedString("'") | QuotedString('"') +MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) +BOOLOP = L("and") | L("or") +MARKER_VAR = VARIABLE | MARKER_VALUE +MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) +MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() +MARKER_EXPR = Forward() +MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) +MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) +MARKER = stringStart + MARKER_EXPR + stringEnd + +def _coerce_parse_result(results): + if isinstance(results, ParseResults): + return [_coerce_parse_result(i) for i in results] + return results + + +def _format_marker(marker, first=True): + if not isinstance(marker, (list, tuple, string_types)): + raise AssertionError + elif isinstance(marker, list): + if len(marker) == 1: + if isinstance(marker[0], (list, tuple)): + return _format_marker(marker[0]) + if isinstance(marker, list): + inner = (_format_marker(m, first=False) for m in marker) + if first: + return " ".join(inner) + return "(" + " ".join(inner) + ")" + else: + if isinstance(marker, tuple): + return " ".join([m.serialize() for m in marker]) + return marker + + +_operators = {'in':lambda lhs, rhs: lhs in rhs, + 'not in':lambda lhs, rhs: lhs not in rhs, + '<':operator.lt, + '<=':operator.le, + '==':operator.eq, + '!=':operator.ne, + '>=':operator.ge, + '>':operator.gt} + +def _eval_op(lhs, op, rhs): + try: + spec = Specifier("".join([op.serialize(), rhs])) + except InvalidSpecifier: + pass + else: + return spec.contains(lhs) + oper = _operators.get(op.serialize()) + if oper is None: + raise UndefinedComparison("Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)) + return oper(lhs, rhs) + + +class Undefined(object): + pass + + +_undefined = Undefined() + +def _get_env(environment, name): + value = 
environment.get(name, _undefined) + if isinstance(value, Undefined): + raise UndefinedEnvironmentName("{0!r} does not exist in evaluation environment.".format(name)) + return value + + +def _evaluate_markers(markers, environment): + groups = [[]] + for marker in markers: + assert isinstance(marker, (list, tuple, string_types)) + if isinstance(marker, list): + groups[-1].append(_evaluate_markers(marker, environment)) + elif isinstance(marker, tuple): + lhs, op, rhs = marker + if isinstance(lhs, Variable): + lhs_value = _get_env(environment, lhs.value) + rhs_value = rhs.value + else: + lhs_value = lhs.value + rhs_value = _get_env(environment, rhs.value) + groups[-1].append(_eval_op(lhs_value, op, rhs_value)) + elif not marker in ('and', 'or'): + raise AssertionError + if marker == "or": + groups.append([]) + + return any((all(item) for item in groups)) + + +def format_full_version(info): + version = "{0.major}.{0.minor}.{0.micro}".format(info) + kind = info.releaselevel + if kind != "final": + version += kind[0] + str(info.serial) + return version + + +def default_environment(): + if hasattr(sys, "implementation"): + iver = format_full_version(sys.implementation.version) + implementation_name = sys.implementation.name + else: + iver = "0" + implementation_name = "" + return {'implementation_name':implementation_name, + 'implementation_version':iver, + 'os_name':os.name, + 'platform_machine':(platform.machine)(), + 'platform_release':(platform.release)(), + 'platform_system':(platform.system)(), + 'platform_version':(platform.version)(), + 'python_full_version':(platform.python_version)(), + 'platform_python_implementation':(platform.python_implementation)(), + 'python_version':(".".join)(platform.python_version_tuple()[None[:2]]), + 'sys_platform':sys.platform} + + +class Marker(object): + + def __init__(self, marker): + try: + self._markers = _coerce_parse_result(MARKER.parseString(marker)) + except ParseException as e: + try: + err_str = "Invalid marker: {0!r}, 
parse error at {1!r}".format(marker, marker[e.loc[:e.loc + 8]]) + raise InvalidMarker(err_str) + finally: + e = None + del e + + def __str__(self): + return _format_marker(self._markers) + + def __repr__(self): + return "".format(str(self)) + + def evaluate(self, environment=None): + """Evaluate a marker. + + Return the boolean from evaluating the given marker against the + environment. environment is an optional argument to override all or + part of the determined environment. + + The environment is determined from the current Python process. + """ + current_environment = default_environment() + if environment is not None: + current_environment.update(environment) + return _evaluate_markers(self._markers, current_environment) diff --git a/APPS_UNCOMPILED/lib/packaging/requirements.py b/APPS_UNCOMPILED/lib/packaging/requirements.py new file mode 100644 index 0000000..2df977d --- /dev/null +++ b/APPS_UNCOMPILED/lib/packaging/requirements.py @@ -0,0 +1,108 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/packaging/requirements.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 5089 bytes +from __future__ import absolute_import, division, print_function +import string, re, sys +from pyparsing import stringStart, stringEnd, originalTextFor, ParseException +from pyparsing import ZeroOrMore, Word, Optional, Regex, Combine +from pyparsing import Literal as L +from ._typing import TYPE_CHECKING +from .markers import MARKER_EXPR, Marker +from .specifiers import LegacySpecifier, Specifier, SpecifierSet +if sys.version_info[0] >= 3: + from urllib import parse as urlparse +else: + import urlparse +if TYPE_CHECKING: + from typing import List, Optional as TOptional, Set + +class InvalidRequirement(ValueError): + __doc__ = "\n An invalid requirement was found, users should 
refer to PEP 508.\n " + + +ALPHANUM = Word(string.ascii_letters + string.digits) +LBRACKET = L("[").suppress() +RBRACKET = L("]").suppress() +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() +COMMA = L(",").suppress() +SEMICOLON = L(";").suppress() +AT = L("@").suppress() +PUNCTUATION = Word("-_.") +IDENTIFIER_END = ALPHANUM | ZeroOrMore(PUNCTUATION) + ALPHANUM +IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) +NAME = IDENTIFIER("name") +EXTRA = IDENTIFIER +URI = Regex("[^ ]+")("url") +URL = AT + URI +EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) +EXTRAS = LBRACKET + Optional(EXTRAS_LIST) + RBRACKET("extras") +VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) +VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) +VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY +VERSION_MANY = Combine((VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE)), + joinString=",", adjacent=False)("_raw_spec") +_VERSION_SPEC = Optional(LPAREN + VERSION_MANY + RPAREN | VERSION_MANY) +_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "") +VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") +VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) +MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") +MARKER_EXPR.setParseAction(lambda s, l, t: Marker(s[t._original_start[:t._original_end]])) +MARKER_SEPARATOR = SEMICOLON +MARKER = MARKER_SEPARATOR + MARKER_EXPR +VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) +URL_AND_MARKER = URL + Optional(MARKER) +NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) +REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd +REQUIREMENT.parseString("x[]") + +class Requirement(object): + __doc__ = "Parse a requirement.\n\n Parse a given requirement string into its parts, such as name, specifier,\n URL, and extras. 
Raises InvalidRequirement on a badly-formed requirement\n string.\n " + + def __init__(self, requirement_string): + try: + req = REQUIREMENT.parseString(requirement_string) + except ParseException as e: + try: + raise InvalidRequirement('Parse error at "{0!r}": {1}'.format(requirement_string[e.loc[:e.loc + 8]], e.msg)) + finally: + e = None + del e + + self.name = req.name + if req.url: + parsed_url = urlparse.urlparse(req.url) + if parsed_url.scheme == "file": + if urlparse.urlunparse(parsed_url) != req.url: + raise InvalidRequirement("Invalid URL given") + else: + if not (parsed_url.scheme and parsed_url.netloc and (parsed_url.scheme or parsed_url.netloc)): + raise InvalidRequirement("Invalid URL: {0}".format(req.url)) + self.url = req.url + else: + self.url = None + self.extras = set(req.extras.asList() if req.extras else []) + self.specifier = SpecifierSet(req.specifier) + self.marker = req.marker if req.marker else None + + def __str__(self): + parts = [ + self.name] + if self.extras: + parts.append("[{0}]".format(",".join(sorted(self.extras)))) + if self.specifier: + parts.append(str(self.specifier)) + if self.url: + parts.append("@ {0}".format(self.url)) + if self.marker: + parts.append(" ") + if self.marker: + parts.append("; {0}".format(self.marker)) + return "".join(parts) + + def __repr__(self): + return "".format(str(self)) diff --git a/APPS_UNCOMPILED/lib/packaging/specifiers.py b/APPS_UNCOMPILED/lib/packaging/specifiers.py new file mode 100644 index 0000000..5e7e812 --- /dev/null +++ b/APPS_UNCOMPILED/lib/packaging/specifiers.py @@ -0,0 +1,483 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/packaging/specifiers.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 32208 bytes +from __future__ import absolute_import, division, print_function 
+import abc, functools, itertools, re, warnings +from ._compat import string_types, with_metaclass +from ._typing import TYPE_CHECKING +from .utils import canonicalize_version +from .version import Version, LegacyVersion, parse +if TYPE_CHECKING: + from typing import List, Dict, Union, Iterable, Iterator, Optional, Callable, Tuple + ParsedVersion = Union[(Version, LegacyVersion)] + UnparsedVersion = Union[(Version, LegacyVersion, str)] + CallableOperator = Callable[([ParsedVersion, str], bool)] + +class InvalidSpecifier(ValueError): + __doc__ = "\n An invalid specifier was found, users should refer to PEP 440.\n " + + +class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): + + @abc.abstractmethod + def __str__(self): + """ + Returns the str representation of this Specifier like object. This + should be representative of the Specifier itself. + """ + pass + + @abc.abstractmethod + def __hash__(self): + """ + Returns a hash value for this Specifier like object. + """ + pass + + @abc.abstractmethod + def __eq__(self, other): + """ + Returns a boolean representing whether or not the two Specifier like + objects are equal. + """ + pass + + @abc.abstractmethod + def __ne__(self, other): + """ + Returns a boolean representing whether or not the two Specifier like + objects are not equal. + """ + pass + + @abc.abstractproperty + def prereleases(self): + """ + Returns whether or not pre-releases as a whole are allowed by this + specifier. + """ + pass + + @prereleases.setter + def prereleases(self, value): + """ + Sets whether or not pre-releases as a whole are allowed by this + specifier. + """ + pass + + @abc.abstractmethod + def contains(self, item, prereleases=None): + """ + Determines if the given item is contained within this specifier. + """ + pass + + @abc.abstractmethod + def filter(self, iterable, prereleases=None): + """ + Takes an iterable of items and filters them so that only items which + are contained within this specifier are allowed in it. 
+ """ + pass + + +class _IndividualSpecifier(BaseSpecifier): + _operators = {} + + def __init__(self, spec='', prereleases=None): + match = self._regex.search(spec) + if not match: + raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) + self._spec = ( + match.group("operator").strip(), + match.group("version").strip()) + self._prereleases = prereleases + + def __repr__(self): + pre = ", prereleases={0!r}".format(self.prereleases) if self._prereleases is not None else "" + return "<{0}({1!r}{2})>".format(self.__class__.__name__, str(self), pre) + + def __str__(self): + return ("{0}{1}".format)(*self._spec) + + @property + def _canonical_spec(self): + return ( + self._spec[0], canonicalize_version(self._spec[1])) + + def __hash__(self): + return hash(self._canonical_spec) + + def __eq__(self, other): + if isinstance(other, string_types): + try: + other = self.__class__(str(other)) + except InvalidSpecifier: + return NotImplemented + + else: + if not isinstance(other, self.__class__): + return NotImplemented + return self._canonical_spec == other._canonical_spec + + def __ne__(self, other): + if isinstance(other, string_types): + try: + other = self.__class__(str(other)) + except InvalidSpecifier: + return NotImplemented + + else: + if not isinstance(other, self.__class__): + return NotImplemented + return self._spec != other._spec + + def _get_operator(self, op): + operator_callable = getattr(self, "_compare_{0}".format(self._operators[op])) + return operator_callable + + def _coerce_version(self, version): + if not isinstance(version, (LegacyVersion, Version)): + version = parse(version) + return version + + @property + def operator(self): + return self._spec[0] + + @property + def version(self): + return self._spec[1] + + @property + def prereleases(self): + return self._prereleases + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + def __contains__(self, item): + return self.contains(item) + + def contains(self, 
item, prereleases=None): + if prereleases is None: + prereleases = self.prereleases + normalized_item = self._coerce_version(item) + if normalized_item.is_prerelease: + if not prereleases: + return False + operator_callable = self._get_operator(self.operator) + return operator_callable(normalized_item, self.version) + + def filter(self, iterable, prereleases=None): + yielded = False + found_prereleases = [] + kw = {"prereleases": (prereleases if prereleases is not None else True)} + for version in iterable: + parsed_version = self._coerce_version(version) + if (self.contains)(parsed_version, **kw): + if parsed_version.is_prerelease: + if not prereleases: + if not self.prereleases: + found_prereleases.append(version) + yielded = True + yield version + + if not yielded: + if found_prereleases: + for version in found_prereleases: + yield version + + +class LegacySpecifier(_IndividualSpecifier): + _regex_str = '\n (?P(==|!=|<=|>=|<|>))\n \\s*\n (?P\n [^,;\\s)]* # Since this is a "legacy" specifier, and the version\n # string can be just about anything, we match everything\n # except for whitespace, a semi-colon for marker support,\n # a closing paren since versions can be enclosed in\n # them, and a comma since it\'s a version separator.\n )\n ' + _regex = re.compile("^\\s*" + _regex_str + "\\s*$", re.VERBOSE | re.IGNORECASE) + _operators = { + '==': '"equal"', + '!=': '"not_equal"', + '<=': '"less_than_equal"', + '>=': '"greater_than_equal"', + '<': '"less_than"', + '>': '"greater_than"'} + + def __init__(self, spec='', prereleases=None): + super(LegacySpecifier, self).__init__(spec, prereleases) + warnings.warn("Creating a LegacyVersion has been deprecated and will be removed in the next major release", DeprecationWarning) + + def _coerce_version(self, version): + if not isinstance(version, LegacyVersion): + version = LegacyVersion(str(version)) + return version + + def _compare_equal(self, prospective, spec): + return prospective == self._coerce_version(spec) + + 
def _require_version_compare(fn):
    """Decorator: PEP 440 comparisons only apply to real Versions.

    Anything else (e.g. a LegacyVersion) never matches the clause.
    """

    @functools.wraps(fn)
    def wrapped(self, prospective, spec):
        if not isinstance(prospective, Version):
            return False
        return fn(self, prospective, spec)

    return wrapped


class Specifier(_IndividualSpecifier):
    """A PEP 440 specifier clause such as "~=1.4.2", "==1.0.*" or ">=1.0"."""

    _regex_str = r"""
        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
        (?P<version>
            (?:
                # The identity operators allow for an escape hatch that will
                # do an exact string match of the version you wish to install.
                # This will not be parsed by PEP 440 and we cannot determine
                # any semantic meaning from it. This operator is discouraged
                # but included entirely as an escape hatch.
                (?<====)  # Only match for the identity operator
                \s*
                [^\s]*    # We just match everything, except for whitespace
                          # since we are only testing for strict identity.
            )
            |
            (?:
                # The (non)equality operators allow for wild card and local
                # versions to be specified so we have to define these two
                # operators separately to enable that.
                (?<===|!=)            # Only match for equals and not equals
                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                # You cannot use a wild card and a dev or local version
                # together so group them with a | and make them optional.
                (?:
                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
                    |
                    \.\*  # Wild card syntax of .*
                )?
            )
            |
            (?:
                # The compatible operator requires at least two digits in the
                # release segment.
                (?<=~=)               # Only match for the compatible operator
                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?  # dev release
            )
            |
            (?:
                # All other operators only allow a sub set of what the
                # (non)equality operators do. Specifically they do not allow
                # local versions to be specified nor do they allow the prefix
                # matching wild cards.
                (?<!==|!=|~=)         # We have special cases for these
                                      # operators so we want to make sure they
                                      # don't match here.
                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?  # dev release
            )
        )
        """

    _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    # NOTE: values must be plain method-name suffixes; the decompiled dump
    # wrapped them in literal double quotes, breaking _get_operator.
    _operators = {
        "~=": "compatible",
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
        "===": "arbitrary",
    }

    @_require_version_compare
    def _compare_compatible(self, prospective, spec):
        # "~=X.Y.Z" behaves like ">= X.Y.Z, == X.Y.*": drop the last release
        # segment (ignoring any pre/post/dev suffix) to build the prefix.
        # BUGFIX: the decompiled slice was `[None[:-1]]` (a TypeError).
        prefix = ".".join(
            list(
                itertools.takewhile(
                    lambda x: not x.startswith("post") and not x.startswith("dev"),
                    _version_split(spec),
                )
            )[:-1]
        )
        # Add the prefix notation wildcard.
        prefix += ".*"
        return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
            prospective, prefix
        )

    @_require_version_compare
    def _compare_equal(self, prospective, spec):
        if spec.endswith(".*"):
            # Prefix match: compare release segments, zero-padded to the same
            # length, ignoring the candidate's local version.
            prospective = Version(prospective.public)
            split_spec = _version_split(spec[:-2])  # drop the ".*"
            split_prospective = _version_split(str(prospective))
            # Only compare as many segments as the spec names.
            shortened_prospective = split_prospective[: len(split_spec)]
            padded_spec, padded_prospective = _pad_version(
                split_spec, shortened_prospective
            )
            return padded_prospective == padded_spec
        spec_version = Version(spec)
        # Unless the spec pins a local version, ignore the candidate's.
        if not spec_version.local:
            prospective = Version(prospective.public)
        return prospective == spec_version

    @_require_version_compare
    def _compare_not_equal(self, prospective, spec):
        return not self._compare_equal(prospective, spec)

    @_require_version_compare
    def _compare_less_than_equal(self, prospective, spec):
        return Version(prospective.public) <= Version(spec)

    @_require_version_compare
    def _compare_greater_than_equal(self, prospective, spec):
        return Version(prospective.public) >= Version(spec)

    @_require_version_compare
    def _compare_less_than(self, prospective, spec_str):
        spec = Version(spec_str)
        if not prospective < spec:
            return False
        # "<V" with a final V does not allow pre-releases of V itself.
        if not spec.is_prerelease and prospective.is_prerelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False
        return True

    @_require_version_compare
    def _compare_greater_than(self, prospective, spec_str):
        spec = Version(spec_str)
        if not prospective > spec:
            return False
        # ">V" does not allow post-releases of V itself...
        if not spec.is_postrelease and prospective.is_postrelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False
        # ...nor local versions of V itself.
        if prospective.local is not None:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False
        return True

    def _compare_arbitrary(self, prospective, spec):
        # "===": plain case-insensitive string identity.
        return str(prospective).lower() == str(spec).lower()

    @property
    def prereleases(self):
        # An explicit setting always wins.
        if self._prereleases is not None:
            return self._prereleases
        # Otherwise, a clause that explicitly names a pre-release implies
        # that pre-releases are acceptable.
        operator, version = self._spec
        if operator in ("==", ">=", "<=", "~=", "==="):
            # The "==" prefix form never names a concrete pre-release.
            if operator == "==" and version.endswith(".*"):
                version = version[:-2]
            if parse(version).is_prerelease:
                return True
        # BUGFIX: the decompiled code fell off the end (returning None)
        # when the operator was "<", ">" or "!=".
        return False

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value


_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")


def _version_split(version):
    """Split "1.2rc1" into ["1", "2", "rc1"]: on dots, then pre-release tags."""
    result = []
    for item in version.split("."):
        match = _prefix_regex.search(item)
        if match:
            result.extend(match.groups())
        else:
            result.append(item)
    return result


def _pad_version(left, right):
    """Zero-pad the shorter numeric prefix so both sides compare evenly."""
    left_split, right_split = [], []
    # Get the release segment of our versions.
    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
    # Get the rest of our versions.
    # BUGFIX: the decompiled slices were `left[len(...)[:None]]` (TypeError).
    left_split.append(left[len(left_split[0]):])
    right_split.append(right[len(right_split[0]):])
    # Insert our padding.
    left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
    right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
    return (
        list(itertools.chain(*left_split)),
        list(itertools.chain(*right_split)),
    )
class SpecifierSet(BaseSpecifier):
    """A comma-separated collection of specifier clauses, ANDed together."""

    def __init__(self, specifiers="", prereleases=None):
        # Split on commas, ignoring empty clauses.
        split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
        # Parse each clause as PEP 440; fall back to the legacy grammar.
        parsed = set()
        for specifier in split_specifiers:
            try:
                parsed.add(Specifier(specifier))
            except InvalidSpecifier:
                parsed.add(LegacySpecifier(specifier))
        self._specs = frozenset(parsed)
        # Tri-state pre-release override; None means "no opinion".
        self._prereleases = prereleases

    def __repr__(self):
        pre = (
            ", prereleases={0!r}".format(self.prereleases)
            if self._prereleases is not None
            else ""
        )
        # BUGFIX: the decompiled format string had lost its literal text.
        return "<SpecifierSet({0!r}{1})>".format(str(self), pre)

    def __str__(self):
        return ",".join(sorted(str(s) for s in self._specs))

    def __hash__(self):
        return hash(self._specs)

    def __and__(self, other):
        """Combine two sets; explicit prerelease overrides must not conflict."""
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif not isinstance(other, SpecifierSet):
            return NotImplemented
        specifier = SpecifierSet()
        specifier._specs = frozenset(self._specs | other._specs)
        if self._prereleases is None and other._prereleases is not None:
            specifier._prereleases = other._prereleases
        elif self._prereleases is not None and other._prereleases is None:
            specifier._prereleases = self._prereleases
        elif self._prereleases == other._prereleases:
            specifier._prereleases = self._prereleases
        else:
            raise ValueError(
                "Cannot combine SpecifierSets with True and False prerelease overrides."
            )
        return specifier

    def __eq__(self, other):
        if isinstance(other, (string_types, _IndividualSpecifier)):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented
        return self._specs == other._specs

    def __ne__(self, other):
        if isinstance(other, (string_types, _IndividualSpecifier)):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented
        return self._specs != other._specs

    def __len__(self):
        return len(self._specs)

    def __iter__(self):
        return iter(self._specs)

    @property
    def prereleases(self):
        # An explicit override always wins.
        if self._prereleases is not None:
            return self._prereleases
        # An empty set has no opinion either way.
        # BUGFIX: the decompiled body returned `self._specs or None` here,
        # making the real any(...) answer unreachable.
        if not self._specs:
            return None
        # Otherwise, allow pre-releases if any clause does.
        return any(s.prereleases for s in self._specs)

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value

    def __contains__(self, item):
        return self.contains(item)

    def contains(self, item, prereleases=None):
        """Return True if *item* satisfies every clause in the set."""
        if not isinstance(item, (LegacyVersion, Version)):
            item = parse(item)
        # BUGFIX: the decompiled `elif` skipped this default whenever the
        # item needed parsing.
        if prereleases is None:
            prereleases = self.prereleases
        if item.is_prerelease and not prereleases:
            return False
        return all(s.contains(item, prereleases=prereleases) for s in self._specs)

    def filter(self, iterable, prereleases=None):
        """Filter *iterable* down to the versions satisfying the set."""
        if prereleases is None:
            prereleases = self.prereleases
        if self._specs:
            # Delegate to each clause in turn.
            for spec in self._specs:
                iterable = spec.filter(iterable, prereleases=bool(prereleases))
            return iterable
        # With no clauses, apply only the pre-release policy; legacy
        # versions are always dropped on this path.
        filtered = []
        found_prereleases = []
        for item in iterable:
            if not isinstance(item, (LegacyVersion, Version)):
                parsed_version = parse(item)
            else:
                parsed_version = item
            if isinstance(parsed_version, LegacyVersion):
                continue
            if parsed_version.is_prerelease and not prereleases:
                found_prereleases.append(item)
            else:
                filtered.append(item)
        # Fall back to pre-releases if nothing else survived and no
        # explicit policy forbade them.
        if not filtered and found_prereleases and prereleases is None:
            return found_prereleases
        return filtered
b/APPS_UNCOMPILED/lib/packaging/tags.py @@ -0,0 +1,658 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/packaging/tags.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 28938 bytes +from __future__ import absolute_import +import distutils.util +try: + from importlib.machinery import EXTENSION_SUFFIXES +except ImportError: + import imp + EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()] + del imp + +import collections, logging, os, platform, re, struct, sys, sysconfig, warnings +from ._typing import TYPE_CHECKING, cast +if TYPE_CHECKING: + from typing import Dict, FrozenSet, IO, Iterable, Iterator, List, Optional, Sequence, Tuple, Union + PythonVersion = Sequence[int] + MacVersion = Tuple[(int, int)] + GlibcVersion = Tuple[(int, int)] +logger = logging.getLogger(__name__) +INTERPRETER_SHORT_NAMES = { + 'python': '"py"', + 'cpython': '"cp"', + 'pypy': '"pp"', + 'ironpython': '"ip"', + 'jython': '"jy"'} +_32_BIT_INTERPRETER = sys.maxsize <= 4294967296L +_LEGACY_MANYLINUX_MAP = {(2, 17):"manylinux2014", + (2, 12):"manylinux2010", + (2, 5):"manylinux1"} +_LAST_GLIBC_MINOR = collections.defaultdict(lambda: 50) +glibcVersion = collections.namedtuple("Version", ["major", "minor"]) + +class Tag(object): + __doc__ = "\n A representation of the tag triple for a wheel.\n\n Instances are considered immutable and thus are hashable. 
Equality checking\n is also supported.\n " + __slots__ = [ + "_interpreter", "_abi", "_platform", "_hash"] + + def __init__(self, interpreter, abi, platform): + self._interpreter = interpreter.lower() + self._abi = abi.lower() + self._platform = platform.lower() + self._hash = hash((self._interpreter, self._abi, self._platform)) + + @property + def interpreter(self): + return self._interpreter + + @property + def abi(self): + return self._abi + + @property + def platform(self): + return self._platform + + def __eq__(self, other): + if not isinstance(other, Tag): + return NotImplemented + return self.platform == other.platform and self.abi == other.abi and self.interpreter == other.interpreter + + def __hash__(self): + return self._hash + + def __str__(self): + return "{}-{}-{}".format(self._interpreter, self._abi, self._platform) + + def __repr__(self): + return "<{self} @ {self_id}>".format(self=self, self_id=(id(self))) + + +def parse_tag(tag): + """ + Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. + + Returning a set is required due to the possibility that the tag is a + compressed tag set. + """ + tags = set() + interpreters, abis, platforms = tag.split("-") + for interpreter in interpreters.split("."): + for abi in abis.split("."): + for platform_ in platforms.split("."): + tags.add(Tag(interpreter, abi, platform_)) + + return frozenset(tags) + + +def _warn_keyword_parameter(func_name, kwargs): + """ + Backwards-compatibility with Python 2.7 to allow treating 'warn' as keyword-only. 
+ """ + if not kwargs: + return False + if len(kwargs) > 1 or "warn" not in kwargs: + kwargs.pop("warn", None) + arg = next(iter(kwargs.keys())) + raise TypeError("{}() got an unexpected keyword argument {!r}".format(func_name, arg)) + return kwargs["warn"] + + +def _get_config_var(name, warn=False): + value = sysconfig.get_config_var(name) + if value is None: + if warn: + logger.debug("Config variable '%s' is unset, Python ABI tag may be incorrect", name) + return value + + +def _normalize_string(string): + return string.replace(".", "_").replace("-", "_") + + +def _abi3_applies(python_version): + """ + Determine if the Python version supports abi3. + + PEP 384 was first implemented in Python 3.2. + """ + return len(python_version) > 1 and tuple(python_version) >= (3, 2) + + +def _cpython_abis(py_version, warn=False): + py_version = tuple(py_version) + abis = [] + version = _version_nodot(py_version[None[:2]]) + debug = pymalloc = ucs4 = "" + with_debug = _get_config_var("Py_DEBUG", warn) + has_refcount = hasattr(sys, "gettotalrefcount") + has_ext = "_d.pyd" in EXTENSION_SUFFIXES + if not with_debug: + if with_debug is None: + if has_refcount or has_ext: + debug = "d" + if py_version < (3, 8): + with_pymalloc = _get_config_var("WITH_PYMALLOC", warn) + if with_pymalloc or with_pymalloc is None: + pymalloc = "m" + if py_version < (3, 3): + unicode_size = _get_config_var("Py_UNICODE_SIZE", warn) + if not unicode_size == 4: + if not unicode_size is None or sys.maxunicode == 1114111: + ucs4 = "u" + elif debug: + abis.append("cp{version}".format(version=version)) + abis.insert(0, "cp{version}{debug}{pymalloc}{ucs4}".format(version=version, + debug=debug, + pymalloc=pymalloc, + ucs4=ucs4)) + return abis + + +def cpython_tags(python_version=None, abis=None, platforms=None, **kwargs): + """ + Yields the tags for a CPython interpreter. + + The tags consist of: + - cp-- + - cp-abi3- + - cp-none- + - cp-abi3- # Older Python versions down to 3.2. 
+ + If python_version only specifies a major version then user-provided ABIs and + the 'none' ABItag will be used. + + If 'abi3' or 'none' are specified in 'abis' then they will be yielded at + their normal position and not at the beginning. + """ + warn = _warn_keyword_parameter("cpython_tags", kwargs) + if not python_version: + python_version = sys.version_info[None[:2]] + else: + interpreter = "cp{}".format(_version_nodot(python_version[None[:2]])) + if abis is None: + if len(python_version) > 1: + abis = _cpython_abis(python_version, warn) + else: + abis = [] + abis = list(abis) + for explicit_abi in ('abi3', 'none'): + try: + abis.remove(explicit_abi) + except ValueError: + pass + + platforms = list(platforms or _platform_tags()) + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + + if _abi3_applies(python_version): + for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms): + yield tag + + for tag in (Tag(interpreter, "none", platform_) for platform_ in platforms): + yield tag + + if _abi3_applies(python_version): + for minor_version in range(python_version[1] - 1, 1, -1): + for platform_ in platforms: + interpreter = "cp{version}".format(version=(_version_nodot((python_version[0], minor_version)))) + yield Tag(interpreter, "abi3", platform_) + + +def _generic_abi(): + abi = sysconfig.get_config_var("SOABI") + if abi: + yield _normalize_string(abi) + + +def generic_tags(interpreter=None, abis=None, platforms=None, **kwargs): + """ + Yields the tags for a generic interpreter. + + The tags consist of: + - -- + + The "none" ABI will be added if it was not explicitly provided. 
+ """ + warn = _warn_keyword_parameter("generic_tags", kwargs) + if not interpreter: + interp_name = interpreter_name() + interp_version = interpreter_version(warn=warn) + interpreter = "".join([interp_name, interp_version]) + if abis is None: + abis = _generic_abi() + platforms = list(platforms or _platform_tags()) + abis = list(abis) + if "none" not in abis: + abis.append("none") + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + + +def _py_interpreter_range(py_version): + """ + Yields Python versions in descending order. + + After the latest version, the major-only version will be yielded, and then + all previous versions of that major version. + """ + if len(py_version) > 1: + yield "py{version}".format(version=(_version_nodot(py_version[None[:2]]))) + yield "py{major}".format(major=(py_version[0])) + if len(py_version) > 1: + for minor in range(py_version[1] - 1, -1, -1): + yield "py{version}".format(version=(_version_nodot((py_version[0], minor)))) + + +def compatible_tags(python_version=None, interpreter=None, platforms=None): + """ + Yields the sequence of tags that are compatible with a specific version of Python. + + The tags consist of: + - py*-none- + - -none-any # ... if `interpreter` is provided. 
+ - py*-none-any + """ + if not python_version: + python_version = sys.version_info[None[:2]] + platforms = list(platforms or _platform_tags()) + for version in _py_interpreter_range(python_version): + for platform_ in platforms: + yield Tag(version, "none", platform_) + + if interpreter: + yield Tag(interpreter, "none", "any") + for version in _py_interpreter_range(python_version): + yield Tag(version, "none", "any") + + +def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER): + if not is_32bit: + return arch + if arch.startswith("ppc"): + return "ppc" + return "i386" + + +def _mac_binary_formats(version, cpu_arch): + formats = [ + cpu_arch] + if cpu_arch == "x86_64": + if version < (10, 4): + return [] + formats.extend(["intel", "fat64", "fat32"]) + else: + if cpu_arch == "i386": + if version < (10, 4): + return [] + formats.extend(["intel", "fat32", "fat"]) + else: + if cpu_arch == "ppc64" and not version > (10, 5): + if version < (10, 4): + return [] + formats.append("fat64") + else: + if cpu_arch == "ppc": + if version > (10, 6): + return [] + formats.extend(["fat32", "fat"]) + if cpu_arch in {'x86_64', 'arm64'}: + formats.append("universal2") + if cpu_arch in {'ppc', 'intel', 'ppc64', 'i386', 'x86_64'}: + formats.append("universal") + return formats + + +def mac_platforms(version=None, arch=None): + """ + Yields the platform tags for a macOS system. + + The `version` parameter is a two-item tuple specifying the macOS version to + generate platform tags for. The `arch` parameter is the CPU architecture to + generate platform tags for. Both parameters default to the appropriate value + for the current system. 
+ """ + version_str, _, cpu_arch = platform.mac_ver() + if version is None: + version = cast("MacVersion", tuple(map(int, version_str.split(".")[None[:2]]))) + else: + version = version + if arch is None: + arch = _mac_arch(cpu_arch) + else: + arch = arch + if (10, 0) <= version: + if version < (11, 0): + for minor_version in range(version[1], -1, -1): + compat_version = ( + 10, minor_version) + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield "macosx_{major}_{minor}_{binary_format}".format(major=10, + minor=minor_version, + binary_format=binary_format) + + if version >= (11, 0): + for major_version in range(version[0], 10, -1): + compat_version = (major_version, 0) + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield "macosx_{major}_{minor}_{binary_format}".format(major=major_version, + minor=0, + binary_format=binary_format) + + if version >= (11, 0): + if arch == "x86_64": + for minor_version in range(16, 3, -1): + compat_version = ( + 10, minor_version) + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield "macosx_{major}_{minor}_{binary_format}".format(major=(compat_version[0]), + minor=(compat_version[1]), + binary_format=binary_format) + + +def _is_manylinux_compatible(name, arch, glibc_version): + sys_glibc = _get_glibc_version() + if sys_glibc < glibc_version: + return False + try: + import _manylinux + except ImportError: + pass + else: + if hasattr(_manylinux, "manylinux_compatible"): + result = _manylinux.manylinux_compatible(glibc_version[0], glibc_version[1], arch) + if result is not None: + return bool(result) + elif glibc_version == (2, 5): + if hasattr(_manylinux, "manylinux1_compatible"): + return bool(_manylinux.manylinux1_compatible) + elif glibc_version == (2, 12): + if hasattr(_manylinux, "manylinux2010_compatible"): + return bool(_manylinux.manylinux2010_compatible) + if 
glibc_version == (2, 17) and hasattr(_manylinux, "manylinux2014_compatible"): + return bool(_manylinux.manylinux2014_compatible) + + +def _glibc_version_string(): + return _glibc_version_string_confstr() or _glibc_version_string_ctypes() + + +def _glibc_version_string_confstr(): + """ + Primary implementation of glibc_version_string using os.confstr. + """ + try: + version_string = os.confstr("CS_GNU_LIBC_VERSION") + assert version_string is not None + _, version = version_string.split() + except (AssertionError, AttributeError, OSError, ValueError): + return + else: + return version + + +def _glibc_version_string_ctypes(): + """ + Fallback implementation of glibc_version_string using ctypes. + """ + try: + import ctypes + except ImportError: + return + else: + try: + process_namespace = ctypes.CDLL(None) + except OSError: + return + else: + try: + gnu_get_libc_version = process_namespace.gnu_get_libc_version + except AttributeError: + return + else: + gnu_get_libc_version.restype = ctypes.c_char_p + version_str = gnu_get_libc_version() + if not isinstance(version_str, str): + version_str = version_str.decode("ascii") + return version_str + + +def _parse_glibc_version(version_str): + m = re.match("(?P[0-9]+)\\.(?P[0-9]+)", version_str) + if not m: + warnings.warn("Expected glibc version with 2 components major.minor, got: %s" % version_str, RuntimeWarning) + return (-1, -1) + return ( + int(m.group("major")), int(m.group("minor"))) + + +_glibc_version = [] + +def _get_glibc_version(): + if _glibc_version: + return _glibc_version[0] + else: + version_str = _glibc_version_string() + if version_str is None: + _glibc_version.append((-1, -1)) + else: + _glibc_version.append(_parse_glibc_version(version_str)) + return _glibc_version[0] + + +class _ELFFileHeader(object): + + class _InvalidELFFileHeader(ValueError): + __doc__ = "\n An invalid ELF file header was found.\n " + + ELF_MAGIC_NUMBER = 2135247942 + ELFCLASS32 = 1 + ELFCLASS64 = 2 + ELFDATA2LSB = 1 + ELFDATA2MSB = 
class _ELFFileHeader(object):
    """Parsed ELF file header of the running interpreter (see elf(5))."""

    class _InvalidELFFileHeader(ValueError):
        """
        An invalid ELF file header was found.
        """

    ELF_MAGIC_NUMBER = 0x7F454C46  # b"\x7fELF" read as a big-endian uint32.
    ELFCLASS32 = 1
    ELFCLASS64 = 2
    ELFDATA2LSB = 1
    ELFDATA2MSB = 2
    EM_386 = 3
    EM_S390 = 22
    EM_ARM = 40
    EM_X86_64 = 62
    # BUGFIX: these literals carried Python 2 "L" suffixes in the decompiled
    # dump, which is a syntax error on Python 3.
    EF_ARM_ABIMASK = 0xFF000000
    EF_ARM_ABI_VER5 = 0x05000000
    EF_ARM_ABI_FLOAT_HARD = 0x00000400

    def __init__(self, file):
        def unpack(fmt):
            # Read exactly one field; short reads surface as struct.error.
            try:
                (result,) = struct.unpack(fmt, file.read(struct.calcsize(fmt)))
            except struct.error:
                raise _ELFFileHeader._InvalidELFFileHeader()
            return result

        self.e_ident_magic = unpack(">I")
        if self.e_ident_magic != self.ELF_MAGIC_NUMBER:
            raise _ELFFileHeader._InvalidELFFileHeader()
        self.e_ident_class = unpack("B")
        if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}:
            raise _ELFFileHeader._InvalidELFFileHeader()
        self.e_ident_data = unpack("B")
        if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}:
            raise _ELFFileHeader._InvalidELFFileHeader()
        self.e_ident_version = unpack("B")
        self.e_ident_osabi = unpack("B")
        self.e_ident_abiversion = unpack("B")
        self.e_ident_pad = file.read(7)
        # BUGFIX: the remaining fields must be read with the byte order
        # declared in e_ident_data; the decompiled code used native order.
        format_h = "<H" if self.e_ident_data == self.ELFDATA2LSB else ">H"
        format_i = "<I" if self.e_ident_data == self.ELFDATA2LSB else ">I"
        format_q = "<Q" if self.e_ident_data == self.ELFDATA2LSB else ">Q"
        # Pointer-sized fields depend on the ELF class.
        format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q
        self.e_type = unpack(format_h)
        self.e_machine = unpack(format_h)
        self.e_version = unpack(format_i)
        self.e_entry = unpack(format_p)
        self.e_phoff = unpack(format_p)
        self.e_shoff = unpack(format_p)
        self.e_flags = unpack(format_i)
        self.e_ehsize = unpack(format_h)
        self.e_phentsize = unpack(format_h)
        self.e_phnum = unpack(format_h)
        self.e_shentsize = unpack(format_h)
        self.e_shnum = unpack(format_h)
        self.e_shstrndx = unpack(format_h)


def _get_elf_header():
    # Parse the header of the running interpreter binary, or None on failure.
    try:
        with open(sys.executable, "rb") as f:
            elf_header = _ELFFileHeader(f)
    except (IOError, OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader):
        return None
    return elf_header


def _is_linux_armhf():
    # The hard-float ARM ABI is detectable from the interpreter's ELF flags.
    elf_header = _get_elf_header()
    if elf_header is None:
        return False
    result = elf_header.e_ident_class == elf_header.ELFCLASS32
    result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
    result &= elf_header.e_machine == elf_header.EM_ARM
    result &= (
        elf_header.e_flags & elf_header.EF_ARM_ABIMASK
    ) == elf_header.EF_ARM_ABI_VER5
    result &= (
        elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD
    ) == elf_header.EF_ARM_ABI_FLOAT_HARD
    return result


def _is_linux_i686():
    elf_header = _get_elf_header()
    if elf_header is None:
        return False
    result = elf_header.e_ident_class == elf_header.ELFCLASS32
    result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
    result &= elf_header.e_machine == elf_header.EM_386
    return result


def _have_compatible_manylinux_abi(arch):
    # armv7l/i686 need an ELF check; the 64-bit arches are accepted as-is.
    if arch == "armv7l":
        return _is_linux_armhf()
    if arch == "i686":
        return _is_linux_i686()
    return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}


def _manylinux_tags(linux, arch):
    """Yield manylinux tags, newest glibc first, for platform string *linux*."""
    # Oldest glibc to be supported regardless of architecture.
    too_old_glibc2 = glibcVersion(2, 16)
    if arch in {"x86_64", "i686"}:
        # On x86/i686 the oldest supported glibc is (2, 5) (manylinux1).
        too_old_glibc2 = glibcVersion(2, 4)
    current_glibc = glibcVersion(*_get_glibc_version())
    glibc_max_list = [current_glibc]
    # We can assume compatibility across glibc major versions.
    for glibc_major in range(current_glibc.major - 1, 1, -1):
        glibc_max_list.append(glibcVersion(glibc_major, _LAST_GLIBC_MINOR[glibc_major]))
    for glibc_max in glibc_max_list:
        if glibc_max.major == too_old_glibc2.major:
            min_minor = too_old_glibc2.minor
        else:
            # For other glibc major series the oldest supported is (x, 0).
            min_minor = -1
        for glibc_minor in range(glibc_max.minor, min_minor, -1):
            glibc_version = (glibc_max.major, glibc_minor)
            tag = "manylinux_{}_{}".format(*glibc_version)
            if _is_manylinux_compatible(tag, arch, glibc_version):
                yield linux.replace("linux", tag)
            # Handle the legacy manylinux1/2010/2014 aliases.
            if glibc_version in _LEGACY_MANYLINUX_MAP:
                legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
                if _is_manylinux_compatible(legacy_tag, arch, glibc_version):
                    yield linux.replace("linux", legacy_tag)
linux.split("_", 1) + if _have_compatible_manylinux_abi(arch): + for tag in _manylinux_tags(linux, arch): + yield tag + + yield linux + + +def _generic_platforms(): + yield _normalize_string(distutils.util.get_platform()) + + +def _platform_tags(): + """ + Provides the platform tags for this installation. + """ + if platform.system() == "Darwin": + return mac_platforms() + if platform.system() == "Linux": + return _linux_platforms() + return _generic_platforms() + + +def interpreter_name(): + """ + Returns the name of the running interpreter. + """ + try: + name = sys.implementation.name + except AttributeError: + name = platform.python_implementation().lower() + + return INTERPRETER_SHORT_NAMES.get(name) or name + + +def interpreter_version(**kwargs): + """ + Returns the version of the running interpreter. + """ + warn = _warn_keyword_parameter("interpreter_version", kwargs) + version = _get_config_var("py_version_nodot", warn=warn) + if version: + version = str(version) + else: + version = _version_nodot(sys.version_info[None[:2]]) + return version + + +def _version_nodot(version): + return "".join(map(str, version)) + + +def sys_tags(**kwargs): + """ + Returns the sequence of tag triples for the running interpreter. + + The order of the sequence corresponds to priority order for the + interpreter, from most to least important. 
+ """ + warn = _warn_keyword_parameter("sys_tags", kwargs) + interp_name = interpreter_name() + if interp_name == "cp": + for tag in cpython_tags(warn=warn): + yield tag + + else: + for tag in generic_tags(): + yield tag + + for tag in compatible_tags(): + yield tag diff --git a/APPS_UNCOMPILED/lib/packaging/utils.py b/APPS_UNCOMPILED/lib/packaging/utils.py new file mode 100644 index 0000000..a9c391f --- /dev/null +++ b/APPS_UNCOMPILED/lib/packaging/utils.py @@ -0,0 +1,47 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/packaging/utils.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 1915 bytes +from __future__ import absolute_import, division, print_function +import re +from ._typing import TYPE_CHECKING, cast +from .version import InvalidVersion, Version +if TYPE_CHECKING: + from typing import NewType, Union + NormalizedName = NewType("NormalizedName", str) +else: + NormalizedName = str +_canonicalize_regex = re.compile("[-_.]+") + +def canonicalize_name(name): + value = _canonicalize_regex.sub("-", name).lower() + return cast("NormalizedName", value) + + +def canonicalize_version(version): + """ + This is very similar to Version.__str__, but has one subtle difference + with the way it handles the release segment. 
+ """ + if not isinstance(version, Version): + try: + version = Version(version) + except InvalidVersion: + return version + + parts = [] + if version.epoch != 0: + parts.append("{0}!".format(version.epoch)) + parts.append(re.sub("(\\.0)+$", "", ".".join((str(x) for x in version.release)))) + if version.pre is not None: + parts.append("".join((str(x) for x in version.pre))) + if version.post is not None: + parts.append(".post{0}".format(version.post)) + if version.dev is not None: + parts.append(".dev{0}".format(version.dev)) + if version.local is not None: + parts.append("+{0}".format(version.local)) + return "".join(parts) diff --git a/APPS_UNCOMPILED/lib/packaging/version.py b/APPS_UNCOMPILED/lib/packaging/version.py new file mode 100644 index 0000000..40da02d --- /dev/null +++ b/APPS_UNCOMPILED/lib/packaging/version.py @@ -0,0 +1,354 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/packaging/version.py +# Compiled at: 2024-04-18 03:12:56 +# Size of source mod 2**32: 15974 bytes +from __future__ import absolute_import, division, print_function +import collections, itertools, re, warnings +from ._structures import Infinity, NegativeInfinity +from ._typing import TYPE_CHECKING +if TYPE_CHECKING: + from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union + from ._structures import InfinityType, NegativeInfinityType + InfiniteTypes = Union[(InfinityType, NegativeInfinityType)] + PrePostDevType = Union[(InfiniteTypes, Tuple[(str, int)])] + SubLocalType = Union[(InfiniteTypes, int, str)] + LocalType = Union[( + NegativeInfinityType, + Tuple[( + Union[( + SubLocalType, + Tuple[(SubLocalType, str)], + Tuple[(NegativeInfinityType, SubLocalType)])], + ...)])] + CmpKey = Tuple[( + int, Tuple[(int, ...)], PrePostDevType, PrePostDevType, PrePostDevType, 
LocalType)] + LegacyCmpKey = Tuple[(int, Tuple[(str, ...)])] + VersionComparisonMethod = Callable[( + [ + Union[(CmpKey, LegacyCmpKey)], Union[(CmpKey, LegacyCmpKey)]], bool)] +__all__ = [ + 'parse', 'Version', 'LegacyVersion', 'InvalidVersion', 'VERSION_PATTERN'] +_Version = collections.namedtuple("_Version", ['epoch', 'release', 'dev', 'pre', 'post', 'local']) + +def parse(version): + """ + Parse the given version string and return either a :class:`Version` object + or a :class:`LegacyVersion` object depending on if the given version is + a valid PEP 440 version or a legacy version. + """ + try: + return Version(version) + except InvalidVersion: + return LegacyVersion(version) + + +class InvalidVersion(ValueError): + __doc__ = "\n An invalid version was found, users should refer to PEP 440.\n " + + +class _BaseVersion(object): + _key = None + + def __hash__(self): + return hash(self._key) + + def __lt__(self, other): + if not isinstance(other, _BaseVersion): + return NotImplemented + return self._key < other._key + + def __le__(self, other): + if not isinstance(other, _BaseVersion): + return NotImplemented + return self._key <= other._key + + def __eq__(self, other): + if not isinstance(other, _BaseVersion): + return NotImplemented + return self._key == other._key + + def __ge__(self, other): + if not isinstance(other, _BaseVersion): + return NotImplemented + return self._key >= other._key + + def __gt__(self, other): + if not isinstance(other, _BaseVersion): + return NotImplemented + return self._key > other._key + + def __ne__(self, other): + if not isinstance(other, _BaseVersion): + return NotImplemented + return self._key != other._key + + +class LegacyVersion(_BaseVersion): + + def __init__(self, version): + self._version = str(version) + self._key = _legacy_cmpkey(self._version) + warnings.warn("Creating a LegacyVersion has been deprecated and will be removed in the next major release", DeprecationWarning) + + def __str__(self): + return self._version + + 
def __repr__(self): + return "".format(repr(str(self))) + + @property + def public(self): + return self._version + + @property + def base_version(self): + return self._version + + @property + def epoch(self): + return -1 + + @property + def release(self): + pass + + @property + def pre(self): + pass + + @property + def post(self): + pass + + @property + def dev(self): + pass + + @property + def local(self): + pass + + @property + def is_prerelease(self): + return False + + @property + def is_postrelease(self): + return False + + @property + def is_devrelease(self): + return False + + +_legacy_version_component_re = re.compile("(\\d+ | [a-z]+ | \\.| -)", re.VERBOSE) +_legacy_version_replacement_map = { + 'pre': '"c"', + 'preview': '"c"', + '-': '"final-"', + 'rc': '"c"', + 'dev': '"@"'} + +def _parse_version_parts(s): + for part in _legacy_version_component_re.split(s): + part = _legacy_version_replacement_map.get(part, part) + if part: + if part == ".": + continue + if part[None[:1]] in "0123456789": + yield part.zfill(8) + else: + yield "*" + part + + yield "*final" + + +def _legacy_cmpkey(version): + epoch = -1 + parts = [] + for part in _parse_version_parts(version.lower()): + if part.startswith("*"): + if part < "*final": + while parts and parts[-1] == "*final-": + parts.pop() + + while parts and parts[-1] == "00000000": + parts.pop() + + parts.append(part) + + return (epoch, tuple(parts)) + + +VERSION_PATTERN = "\n v?\n (?:\n (?:(?P[0-9]+)!)? # epoch\n (?P[0-9]+(?:\\.[0-9]+)*) # release segment\n (?P
                                          # pre-release\n            [-_\\.]?\n            (?P(a|b|c|rc|alpha|beta|pre|preview))\n            [-_\\.]?\n            (?P[0-9]+)?\n        )?\n        (?P                                         # post release\n            (?:-(?P[0-9]+))\n            |\n            (?:\n                [-_\\.]?\n                (?Ppost|rev|r)\n                [-_\\.]?\n                (?P[0-9]+)?\n            )\n        )?\n        (?P                                          # dev release\n            [-_\\.]?\n            (?Pdev)\n            [-_\\.]?\n            (?P[0-9]+)?\n        )?\n    )\n    (?:\\+(?P[a-z0-9]+(?:[-_\\.][a-z0-9]+)*))?       # local version\n"
+
class Version(_BaseVersion):
    """A PEP 440 compliant version, e.g. "1!2.0.post3.dev4+ubuntu.1".

    Strings that do not match VERSION_PATTERN raise InvalidVersion (callers
    typically fall back to LegacyVersion).  Ordering comes from _BaseVersion
    via the precomputed comparison key.
    """

    # Anchored, case-insensitive, verbose-mode matcher over the shared pattern.
    _regex = re.compile("^\\s*" + VERSION_PATTERN + "\\s*$", re.VERBOSE | re.IGNORECASE)

    def __init__(self, version):
        """Parse *version* (a string); raise InvalidVersion if not PEP 440."""
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion("Invalid version: '{0}'".format(version))
        # Normalized segments: epoch defaults to 0, release is an int tuple,
        # pre/post/dev are (letter, number) pairs or None, local is a tuple
        # of ints/lowercased strings or None.
        self._version = _Version(
            epoch=(int(match.group("epoch")) if match.group("epoch") else 0),
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
            post=_parse_letter_version(match.group("post_l"), match.group("post_n1") or match.group("post_n2")),
            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
            local=_parse_local_version(match.group("local")),
        )
        # Key used by all _BaseVersion rich comparisons.
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self):
        # BUG FIX: the decompiler dropped the repr template, leaving a bare
        # "".format(...) that always returned "".  Restore the upstream
        # packaging repr, e.g. <Version('1.0')>.
        return "<Version({0})>".format(repr(str(self)))

    def __str__(self):
        """Canonical PEP 440 string form of the parsed version."""
        parts = []
        if self.epoch != 0:
            parts.append("{0}!".format(self.epoch))
        parts.append(".".join(str(x) for x in self.release))
        if self.pre is not None:
            parts.append("".join(str(x) for x in self.pre))
        if self.post is not None:
            parts.append(".post{0}".format(self.post))
        if self.dev is not None:
            parts.append(".dev{0}".format(self.dev))
        if self.local is not None:
            parts.append("+{0}".format(self.local))
        return "".join(parts)

    @property
    def epoch(self):
        return self._version.epoch

    @property
    def release(self):
        return self._version.release

    @property
    def pre(self):
        return self._version.pre

    @property
    def post(self):
        # Numeric part only; None when there is no post segment.
        if self._version.post:
            return self._version.post[1]

    @property
    def dev(self):
        # Numeric part only; None when there is no dev segment.
        if self._version.dev:
            return self._version.dev[1]

    @property
    def local(self):
        # Dotted string form of the local segment, or None.
        if self._version.local:
            return ".".join(str(x) for x in self._version.local)
        return None

    @property
    def public(self):
        # Everything before the "+" that introduces the local segment.
        return str(self).split("+", 1)[0]

    @property
    def base_version(self):
        # Epoch and release only — no pre/post/dev/local segments.
        parts = []
        if self.epoch != 0:
            parts.append("{0}!".format(self.epoch))
        parts.append(".".join(str(x) for x in self.release))
        return "".join(parts)

    @property
    def is_prerelease(self):
        return self.dev is not None or self.pre is not None

    @property
    def is_postrelease(self):
        return self.post is not None

    @property
    def is_devrelease(self):
        return self.dev is not None

    @property
    def major(self):
        return self.release[0] if len(self.release) >= 1 else 0

    @property
    def minor(self):
        return self.release[1] if len(self.release) >= 2 else 0

    @property
    def micro(self):
        return self.release[2] if len(self.release) >= 3 else 0
+
+
+def _parse_letter_version(letter, number):
+    if letter:
+        if number is None:
+            number = 0
+        else:
+            letter = letter.lower()
+            if letter == "alpha":
+                letter = "a"
+            else:
+                if letter == "beta":
+                    letter = "b"
+                else:
+                    if letter in ('c', 'pre', 'preview'):
+                        letter = "rc"
+                    else:
+                        if letter in ('rev', 'r'):
+                            letter = "post"
+        return (
+         letter, int(number))
+    if not letter:
+        if number:
+            letter = "post"
+            return (
+             letter, int(number))
+
+
+_local_version_separators = re.compile("[\\._-]")
+
+def _parse_local_version(local):
+    """
+    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+    """
+    if local is not None:
+        return tuple((part.lower() if not part.isdigit() else int(part) for part in _local_version_separators.split(local)))
+
+
+def _cmpkey(epoch, release, pre, post, dev, local):
+    _release = tuple(reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release)))))
+    if pre is None:
+        if post is None and dev is not None:
+            _pre = NegativeInfinity
+        else:
+            if pre is None:
+                _pre = Infinity
+            else:
+                _pre = pre
+    else:
+        if post is None:
+            _post = NegativeInfinity
+        else:
+            _post = post
+        if dev is None:
+            _dev = Infinity
+        else:
+            _dev = dev
+        if local is None:
+            _local = NegativeInfinity
+        else:
+            _local = tuple(((i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local))
+    return (epoch, _release, _pre, _post, _dev, _local)
diff --git a/APPS_UNCOMPILED/lib/paho/__init__.py b/APPS_UNCOMPILED/lib/paho/__init__.py
new file mode 100644
index 0000000..2f34f24
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/paho/__init__.py
@@ -0,0 +1,7 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/paho/__init__.py
+# Compiled at: 2024-04-18 03:12:55
+pass
diff --git a/APPS_UNCOMPILED/lib/paho/mqtt/__init__.py b/APPS_UNCOMPILED/lib/paho/mqtt/__init__.py
new file mode 100644
index 0000000..32572c6
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/paho/mqtt/__init__.py
@@ -0,0 +1,11 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/paho/mqtt/__init__.py
+# Compiled at: 2024-04-18 03:12:55
+# Size of source mod 2**32: 65 bytes
+__version__ = "1.6.1"
+
class MQTTException(Exception):
    """Base exception raised by the paho.mqtt package."""
    pass
diff --git a/APPS_UNCOMPILED/lib/paho/mqtt/client.py b/APPS_UNCOMPILED/lib/paho/mqtt/client.py
new file mode 100644
index 0000000..df7b499
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/paho/mqtt/client.py
@@ -0,0 +1,3183 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/paho/mqtt/client.py
+# Compiled at: 2024-04-18 03:12:55
+# Size of source mod 2**32: 153182 bytes
+import base64, hashlib, logging, string, struct, sys, threading, time, uuid
+from .matcher import MQTTMatcher
+from .properties import Properties
+from .reasoncodes import ReasonCodes
+from .subscribeoptions import SubscribeOptions
+import collections, errno, os, platform, select, socket
+ssl = None
+try:
+    import ssl
+except ImportError:
+    pass
+
+socks = None
+try:
+    import socks
+except ImportError:
+    pass
+
+try:
+    from urllib import parse as urllib_dot_parse
+    from urllib import request as urllib_dot_request
+except ImportError:
+    import urllib as urllib_dot_request, urlparse as urllib_dot_parse
+
+try:
+    time_func = time.monotonic
+except AttributeError:
+    time_func = time.time
+
+try:
+    import dns.resolver
+except ImportError:
+    HAVE_DNS = False
+else:
+    HAVE_DNS = True
+if platform.system() == "Windows":
+    EAGAIN = errno.WSAEWOULDBLOCK
+else:
+    EAGAIN = errno.EAGAIN
+try:
+    BlockingIOError
+except NameError:
+    BlockingIOError = IOError
+
# --- Supported MQTT protocol versions ---
MQTTv31 = 3
MQTTv311 = 4
MQTTv5 = 5
# Python 3 compatibility aliases for code written against the Python 2 names.
if sys.version_info[0] >= 3:
    unicode = str
    basestring = str
# --- MQTT control packet types (high nibble of the fixed-header first byte) ---
CONNECT = 16
CONNACK = 32
PUBLISH = 48
PUBACK = 64
PUBREC = 80
PUBREL = 96
PUBCOMP = 112
SUBSCRIBE = 128
SUBACK = 144
UNSUBSCRIBE = 160
UNSUBACK = 176
PINGREQ = 192
PINGRESP = 208
DISCONNECT = 224
AUTH = 240
# --- Log levels passed to the on_log callback ---
MQTT_LOG_INFO = 1
MQTT_LOG_NOTICE = 2
MQTT_LOG_WARNING = 4
MQTT_LOG_ERR = 8
MQTT_LOG_DEBUG = 16
# Mapping from paho log levels to stdlib logging levels (NOTICE has no
# stdlib equivalent and maps to INFO).
LOGGING_LEVEL = {MQTT_LOG_DEBUG: (logging.DEBUG),
 MQTT_LOG_INFO: (logging.INFO),
 MQTT_LOG_NOTICE: (logging.INFO),
 MQTT_LOG_WARNING: (logging.WARNING),
 MQTT_LOG_ERR: (logging.ERROR)}
# --- CONNACK result codes (MQTT v3.1.1) ---
CONNACK_ACCEPTED = 0
CONNACK_REFUSED_PROTOCOL_VERSION = 1
CONNACK_REFUSED_IDENTIFIER_REJECTED = 2
CONNACK_REFUSED_SERVER_UNAVAILABLE = 3
CONNACK_REFUSED_BAD_USERNAME_PASSWORD = 4
CONNACK_REFUSED_NOT_AUTHORIZED = 5
# --- Client connection-state machine values ---
mqtt_cs_new = 0
mqtt_cs_connected = 1
mqtt_cs_disconnecting = 2
mqtt_cs_connect_async = 3
# --- Per-message states for the QoS 1/2 handshakes ---
mqtt_ms_invalid = 0
mqtt_ms_publish = 1
mqtt_ms_wait_for_puback = 2
mqtt_ms_wait_for_pubrec = 3
mqtt_ms_resend_pubrel = 4
mqtt_ms_wait_for_pubrel = 5
mqtt_ms_resend_pubcomp = 6
mqtt_ms_wait_for_pubcomp = 7
mqtt_ms_send_pubrec = 8
mqtt_ms_queued = 9
# --- Error codes returned by client functions (see error_string()) ---
MQTT_ERR_AGAIN = -1
MQTT_ERR_SUCCESS = 0
MQTT_ERR_NOMEM = 1
MQTT_ERR_PROTOCOL = 2
MQTT_ERR_INVAL = 3
MQTT_ERR_NO_CONN = 4
MQTT_ERR_CONN_REFUSED = 5
MQTT_ERR_NOT_FOUND = 6
MQTT_ERR_CONN_LOST = 7
MQTT_ERR_TLS = 8
MQTT_ERR_PAYLOAD_SIZE = 9
MQTT_ERR_NOT_SUPPORTED = 10
MQTT_ERR_AUTH = 11
MQTT_ERR_ACL_DENIED = 12
MQTT_ERR_UNKNOWN = 13
MQTT_ERR_ERRNO = 14
MQTT_ERR_QUEUE_SIZE = 15
MQTT_ERR_KEEPALIVE = 16
# --- Client operating modes ---
MQTT_CLIENT = 0
MQTT_BRIDGE = 1
# MQTT v5 clean-start option: clean session only on the first connect.
MQTT_CLEAN_START_FIRST_ONLY = 3
# Byte written to the internal socketpair to wake the network loop.
sockpair_data = b'0'
+
class WebsocketConnectionError(ValueError):
    """Raised when the WebSocket handshake with the broker fails."""
    pass
+
+
def error_string(mqtt_errno):
    """Return the error string associated with an mqtt error number."""
    # Table lookup with the same text as the original if/return chain;
    # unmapped codes (including MQTT_ERR_AGAIN) fall back to "Unknown error.".
    messages = {
        MQTT_ERR_SUCCESS: "No error.",
        MQTT_ERR_NOMEM: "Out of memory.",
        MQTT_ERR_PROTOCOL: "A network protocol error occurred when communicating with the broker.",
        MQTT_ERR_INVAL: "Invalid function arguments provided.",
        MQTT_ERR_NO_CONN: "The client is not currently connected.",
        MQTT_ERR_CONN_REFUSED: "The connection was refused.",
        MQTT_ERR_NOT_FOUND: "Message not found (internal error).",
        MQTT_ERR_CONN_LOST: "The connection was lost.",
        MQTT_ERR_TLS: "A TLS error occurred.",
        MQTT_ERR_PAYLOAD_SIZE: "Payload too large.",
        MQTT_ERR_NOT_SUPPORTED: "This feature is not supported.",
        MQTT_ERR_AUTH: "Authorisation failed.",
        MQTT_ERR_ACL_DENIED: "Access denied by ACL.",
        MQTT_ERR_UNKNOWN: "Unknown error.",
        MQTT_ERR_ERRNO: "Error defined by errno.",
        MQTT_ERR_QUEUE_SIZE: "Message queue full.",
        MQTT_ERR_KEEPALIVE: "Client or broker did not communicate in the keepalive interval.",
    }
    return messages.get(mqtt_errno, "Unknown error.")
+
+
def connack_string(connack_code):
    """Return the string associated with a CONNACK result."""
    # Table lookup with the same text as the original if/return chain.
    results = {
        CONNACK_ACCEPTED: "Connection Accepted.",
        CONNACK_REFUSED_PROTOCOL_VERSION: "Connection Refused: unacceptable protocol version.",
        CONNACK_REFUSED_IDENTIFIER_REJECTED: "Connection Refused: identifier rejected.",
        CONNACK_REFUSED_SERVER_UNAVAILABLE: "Connection Refused: broker unavailable.",
        CONNACK_REFUSED_BAD_USERNAME_PASSWORD: "Connection Refused: bad user name or password.",
        CONNACK_REFUSED_NOT_AUTHORIZED: "Connection Refused: not authorised.",
    }
    return results.get(connack_code, "Connection Refused: unknown reason.")
+
+
def base62(num, base=string.digits + string.ascii_letters, padding=1):
    """Convert a number to base-62 representation.

    num     -- non-negative integer to convert (asserted).
    base    -- ordered digit alphabet; defaults to 0-9a-zA-Z (62 symbols).
    padding -- minimum length of the result, left-padded with base[0].

    BUG FIX: the radix was hard-coded to 62, which produced wrong digits (or
    an IndexError) whenever a caller supplied a custom alphabet; derive it
    from len(base) instead.  Output with the default alphabet is unchanged.
    """
    assert num >= 0
    radix = len(base)
    digits = []
    while num:
        num, rest = divmod(num, radix)
        digits.append(base[rest])
    digits.extend(base[0] for _ in range(len(digits), padding))
    return "".join(reversed(digits))
+
+
def topic_matches_sub(sub, topic):
    """Check whether a topic matches a subscription.

    For example:

    foo/bar would match the subscription foo/# or +/bar
    non/matching would not match the subscription non/+/+
    """
    matcher = MQTTMatcher()
    matcher[sub] = True
    # The topic matches if the matcher yields at least one entry for it.
    for _ in matcher.iter_match(topic):
        return True
    return False
+
+
+def _socketpair_compat():
+    """TCP/IP socketpair including Windows support"""
+    listensock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_IP)
+    listensock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+    listensock.bind(('127.0.0.1', 0))
+    listensock.listen(1)
+    iface, port = listensock.getsockname()
+    sock1 = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_IP)
+    sock1.setblocking(0)
+    try:
+        sock1.connect(("127.0.0.1", port))
+    except BlockingIOError:
+        pass
+
+    sock2, address = listensock.accept()
+    sock2.setblocking(0)
+    listensock.close()
+    return (sock1, sock2)
+
+
class MQTTMessageInfo(object):
    """This is a class returned from Client.publish() and can be used to find
    out the mid of the message that was published, and to determine whether the
    message has been published, and/or wait until it is published.
    """
    __slots__ = ('mid', '_published', '_condition', 'rc', '_iterpos')

    def __init__(self, mid):
        self.mid = mid                         # message id assigned by the client
        self._published = False                # flipped by _set_as_published()
        self._condition = threading.Condition()
        self.rc = 0                            # result code of the publish call
        self._iterpos = 0                      # cursor for tuple-style iteration

    def __str__(self):
        return str((self.rc, self.mid))

    def __iter__(self):
        # Iterate like the (rc, mid) tuple returned by older paho versions.
        self._iterpos = 0
        return self

    def __next__(self):
        return self.next()

    def next(self):
        if self._iterpos == 0:
            self._iterpos = 1
            return self.rc
        if self._iterpos == 1:
            self._iterpos = 2
            return self.mid
        raise StopIteration

    def __getitem__(self, index):
        # Tuple-style indexing: [0] -> rc, [1] -> mid.
        if index == 0:
            return self.rc
        if index == 1:
            return self.mid
        raise IndexError("index out of range")

    def _set_as_published(self):
        # Mark published and wake any thread blocked in wait_for_publish().
        with self._condition:
            self._published = True
            self._condition.notify()

    def wait_for_publish(self, timeout=None):
        """Block until the message associated with this object is published, or
        until the timeout occurs. If timeout is None, this will never time out.
        Set timeout to a positive number of seconds, e.g. 1.2, to enable the
        timeout.

        Raises ValueError if the message was not queued due to the outgoing
        queue being full.

        Raises RuntimeError if the message was not published for another
        reason.
        """
        if self.rc == MQTT_ERR_QUEUE_SIZE:
            raise ValueError("Message is not queued due to ERR_QUEUE_SIZE")
        elif self.rc == MQTT_ERR_AGAIN:
            pass
        elif self.rc > 0:
            raise RuntimeError("Message publish failed: %s" % error_string(self.rc))

        timeout_time = None if timeout is None else time.time() + timeout
        # Wait in tenths of the timeout so spurious wakeups re-check promptly.
        timeout_tenth = None if timeout is None else timeout / 10.0

        def timed_out():
            if timeout is None:
                return False
            return time.time() > timeout_time

        with self._condition:
            # BUG FIX: the decompiled loop only called wait() while NOT timed
            # out but never left the loop once the deadline passed, spinning
            # forever on an unpublished message.  Exit when timed out instead.
            while not self._published and not timed_out():
                self._condition.wait(timeout_tenth)

    def is_published(self):
        """Returns True if the message associated with this object has been
        published, else returns False."""
        if self.rc == MQTT_ERR_QUEUE_SIZE:
            raise ValueError("Message is not queued due to ERR_QUEUE_SIZE")
        elif self.rc == MQTT_ERR_AGAIN:
            pass
        elif self.rc > 0:
            raise RuntimeError("Message publish failed: %s" % error_string(self.rc))
        with self._condition:
            return self._published
+
+
class MQTTMessage(object):
    """Describes an incoming or outgoing message. It is passed to the
    on_message callback as the message parameter.

    Members:

    topic : String. topic that the message was published on.
    payload : Bytes/Byte array. the message payload.
    qos : Integer. The message Quality of Service 0, 1 or 2.
    retain : Boolean. If true, the message is a retained message and not fresh.
    mid : Integer. The message id.
    properties: Properties class. In MQTT v5.0, the properties associated with the message.
    """
    __slots__ = ('timestamp', 'state', 'dup', 'mid', '_topic', 'payload', 'qos', 'retain',
                 'info', 'properties')

    def __init__(self, mid=0, topic=b''):
        self.mid = mid
        self._topic = topic
        self.info = MQTTMessageInfo(mid)
        self.timestamp = 0
        self.state = mqtt_ms_invalid
        self.dup = False
        self.payload = b''
        self.qos = 0
        self.retain = False

    def __eq__(self, other):
        """Messages compare equal when their message ids match."""
        return self.mid == other.mid if isinstance(other, self.__class__) else False

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self.__eq__(other)

    @property
    def topic(self):
        # Stored as raw bytes on the wire; exposed to callers as text.
        return self._topic.decode("utf-8")

    @topic.setter
    def topic(self, value):
        self._topic = value
+
+
+class Client(object):
+    __doc__ = 'MQTT version 3.1/3.1.1/5.0 client class.\n\n    This is the main class for use communicating with an MQTT broker.\n\n    General usage flow:\n\n    * Use connect()/connect_async() to connect to a broker\n    * Call loop() frequently to maintain network traffic flow with the broker\n    * Or use loop_start() to set a thread running to call loop() for you.\n    * Or use loop_forever() to handle calling loop() for you in a blocking\n    * function.\n    * Use subscribe() to subscribe to a topic and receive messages\n    * Use publish() to send messages\n    * Use disconnect() to disconnect from the broker\n\n    Data returned from the broker is made available with the use of callback\n    functions as described below.\n\n    Callbacks\n    =========\n\n    A number of callback functions are available to receive data back from the\n    broker. To use a callback, define a function and then assign it to the\n    client:\n\n    def on_connect(client, userdata, flags, rc):\n        print("Connection returned " + str(rc))\n\n    client.on_connect = on_connect\n\n    Callbacks can also be attached using decorators:\n\n    client = paho.mqtt.Client()\n\n    @client.connect_callback()\n    def on_connect(client, userdata, flags, rc):\n        print("Connection returned " + str(rc))\n\n\n    **IMPORTANT** the required function signature for a callback can differ\n    depending on whether you are using MQTT v5 or MQTT v3.1.1/v3.1. See the\n    documentation for each callback.\n\n    All of the callbacks as described below have a "client" and an "userdata"\n    argument. 
"client" is the Client instance that is calling the callback.\n    "userdata" is user data of any type and can be set when creating a new client\n    instance or with user_data_set(userdata).\n\n    If you wish to suppress exceptions within a callback, you should set\n    `client.suppress_exceptions = True`\n\n    The callbacks are listed below, documentation for each of them can be found\n    at the same function name:\n\n    on_connect, on_connect_fail, on_disconnect, on_message, on_publish,\n    on_subscribe, on_unsubscribe, on_log, on_socket_open, on_socket_close,\n    on_socket_register_write, on_socket_unregister_write\n    '
+
+    def __init__(self, client_id="", clean_session=None, userdata=None, protocol=MQTTv311, transport="tcp", reconnect_on_failure=True):
+        """client_id is the unique client id string used when connecting to the
+        broker. If client_id is zero length or None, then the behaviour is
+        defined by which protocol version is in use. If using MQTT v3.1.1, then
+        a zero length client id will be sent to the broker and the broker will
+        generate a random for the client. If using MQTT v3.1 then an id will be
+        randomly generated. In both cases, clean_session must be True. If this
+        is not the case a ValueError will be raised.
+
+        clean_session is a boolean that determines the client type. If True,
+        the broker will remove all information about this client when it
+        disconnects. If False, the client is a persistent client and
+        subscription information and queued messages will be retained when the
+        client disconnects.
+        Note that a client will never discard its own outgoing messages on
+        disconnect. Calling connect() or reconnect() will cause the messages to
+        be resent.  Use reinitialise() to reset a client to its original state.
+        The clean_session argument only applies to MQTT versions v3.1.1 and v3.1.
+        It is not accepted if the MQTT version is v5.0 - use the clean_start
+        argument on connect() instead.
+
+        userdata is user defined data of any type that is passed as the "userdata"
+        parameter to callbacks. It may be updated at a later point with the
+        user_data_set() function.
+
+        The protocol argument allows explicit setting of the MQTT version to
+        use for this client. Can be paho.mqtt.client.MQTTv311 (v3.1.1),
+        paho.mqtt.client.MQTTv31 (v3.1) or paho.mqtt.client.MQTTv5 (v5.0),
+        with the default being v3.1.1.
+
+        Set transport to "websockets" to use WebSockets as the transport
+        mechanism. Set to "tcp" to use raw TCP, which is the default.
+        """
+        if transport.lower() not in ('websockets', 'tcp'):
+            raise ValueError('transport must be "websockets" or "tcp", not %s' % transport)
+        else:
+            self._transport = transport.lower()
+            self._protocol = protocol
+            self._userdata = userdata
+            self._sock = None
+            self._sockpairR, self._sockpairW = (None, None)
+            self._keepalive = 60
+            self._connect_timeout = 5.0
+            self._client_mode = MQTT_CLIENT
+            if protocol == MQTTv5:
+                if clean_session is not None:
+                    raise ValueError("Clean session is not used for MQTT 5.0")
+                else:
+                    if clean_session is None:
+                        clean_session = True
+                    if not clean_session:
+                        if client_id == "" or client_id is None:
+                            raise ValueError("A client id must be provided if clean session is False.")
+                    self._clean_session = clean_session
+                if client_id == "" or client_id is None:
+                    if protocol == MQTTv31:
+                        self._client_id = base62((uuid.uuid4().int), padding=22)
+                else:
+                    self._client_id = b''
+            else:
+                pass
+            self._client_id = client_id
+        if isinstance(self._client_id, unicode):
+            self._client_id = self._client_id.encode("utf-8")
+        self._username = None
+        self._password = None
+        self._in_packet = {'command':0, 
+         'have_remaining':0, 
+         'remaining_count':[],  'remaining_mult':1, 
+         'remaining_length':0, 
+         'packet':bytearray(b''), 
+         'to_process':0, 
+         'pos':0}
+        self._out_packet = collections.deque()
+        self._last_msg_in = time_func()
+        self._last_msg_out = time_func()
+        self._reconnect_min_delay = 1
+        self._reconnect_max_delay = 120
+        self._reconnect_delay = None
+        self._reconnect_on_failure = reconnect_on_failure
+        self._ping_t = 0
+        self._last_mid = 0
+        self._state = mqtt_cs_new
+        self._out_messages = collections.OrderedDict()
+        self._in_messages = collections.OrderedDict()
+        self._max_inflight_messages = 20
+        self._inflight_messages = 0
+        self._max_queued_messages = 0
+        self._connect_properties = None
+        self._will_properties = None
+        self._will = False
+        self._will_topic = b''
+        self._will_payload = b''
+        self._will_qos = 0
+        self._will_retain = False
+        self._on_message_filtered = MQTTMatcher()
+        self._host = ""
+        self._port = 1883
+        self._bind_address = ""
+        self._bind_port = 0
+        self._proxy = {}
+        self._in_callback_mutex = threading.Lock()
+        self._callback_mutex = threading.RLock()
+        self._msgtime_mutex = threading.Lock()
+        self._out_message_mutex = threading.RLock()
+        self._in_message_mutex = threading.Lock()
+        self._reconnect_delay_mutex = threading.Lock()
+        self._mid_generate_mutex = threading.Lock()
+        self._thread = None
+        self._thread_terminate = False
+        self._ssl = False
+        self._ssl_context = None
+        self._tls_insecure = False
+        self._logger = None
+        self._registered_write = False
+        self._on_log = None
+        self._on_connect = None
+        self._on_connect_fail = None
+        self._on_subscribe = None
+        self._on_message = None
+        self._on_publish = None
+        self._on_unsubscribe = None
+        self._on_disconnect = None
+        self._on_socket_open = None
+        self._on_socket_close = None
+        self._on_socket_register_write = None
+        self._on_socket_unregister_write = None
+        self._websocket_path = "/mqtt"
+        self._websocket_extra_headers = None
+        self._mqttv5_first_connect = True
+        self.suppress_exceptions = False
+
    def __del__(self):
        # Best-effort cleanup on garbage collection: close the broker socket
        # and the internal wake-up socketpair.
        self._reset_sockets()
+
    def _sock_recv(self, bufsize):
        """Read up to bufsize bytes from the broker socket.

        TLS "want read/write" conditions are surfaced as BlockingIOError so
        the network loop simply retries; a want-write additionally registers
        the socket for writability first.
        NOTE(review): if the optional ssl import failed, ssl is None and the
        except clauses themselves would fail — presumably TLS is never
        enabled in that configuration; confirm against tls_set().
        """
        try:
            return self._sock.recv(bufsize)
        except ssl.SSLWantReadError:
            raise BlockingIOError
        except ssl.SSLWantWriteError:
            self._call_socket_register_write()
            raise BlockingIOError
+
    def _sock_send(self, buf):
        """Write buf to the broker socket, returning the byte count sent.

        All three transient conditions (TLS want-read, TLS want-write, plain
        non-blocking EAGAIN) are normalized to BlockingIOError; the two
        write-side cases also register the socket for writability so the
        loop retries the send.  Handler order matters: the SSL exceptions
        must be tested before BlockingIOError (both derive from OSError).
        """
        try:
            return self._sock.send(buf)
        except ssl.SSLWantReadError:
            raise BlockingIOError
        except ssl.SSLWantWriteError:
            self._call_socket_register_write()
            raise BlockingIOError
        except BlockingIOError:
            self._call_socket_register_write()
            raise BlockingIOError
+
+    def _sock_close(self):
+        """Close the connection to the server."""
+        if not self._sock:
+            return
+        try:
+            sock = self._sock
+            self._sock = None
+            self._call_socket_unregister_write(sock)
+            self._call_socket_close(sock)
+        finally:
+            sock.close()
+
+    def _reset_sockets(self, sockpair_only=False):
+        if sockpair_only == False:
+            self._sock_close()
+        if self._sockpairR:
+            self._sockpairR.close()
+            self._sockpairR = None
+        if self._sockpairW:
+            self._sockpairW.close()
+            self._sockpairW = None
+
    def reinitialise(self, client_id='', clean_session=True, userdata=None):
        # Tear down any open sockets, then re-run __init__ so the client is
        # back in its freshly-constructed state.
        self._reset_sockets()
        self.__init__(client_id, clean_session, userdata)
+
+    def ws_set_options(self, path='/mqtt', headers=None):
+        """ Set the path and headers for a websocket connection
+
+        path is a string starting with / which should be the endpoint of the
+        mqtt connection on the remote server
+
+        headers can be either a dict or a callable object. If it is a dict then
+        the extra items in the dict are added to the websocket headers. If it is
+        a callable, then the default websocket headers are passed into this
+        function and the result is used as the new headers.
+        """
+        self._websocket_path = path
+        if headers is not None:
+            if isinstance(headers, dict) or callable(headers):
+                self._websocket_extra_headers = headers
+            else:
+                raise ValueError("'headers' option to ws_set_options has to be either a dictionary or callable")
+
+    def tls_set_context(self, context=None):
+        """Configure network encryption and authentication context. Enables SSL/TLS support.
+
+        context : an ssl.SSLContext object. By default this is given by
+        `ssl.create_default_context()`, if available.
+
+        Must be called before connect() or connect_async()."""
+        if self._ssl_context is not None:
+            raise ValueError("SSL/TLS has already been configured.")
+        elif context is None:
+            if hasattr(ssl, "create_default_context"):
+                context = ssl.create_default_context()
+            else:
+                raise ValueError("SSL/TLS context must be specified")
+        self._ssl = True
+        self._ssl_context = context
+        if hasattr(context, "check_hostname"):
+            self._tls_insecure = not context.check_hostname
+
+    def tls_set(self, ca_certs=None, certfile=None, keyfile=None, cert_reqs=None, tls_version=None, ciphers=None, keyfile_password=None):
+        """Configure network encryption and authentication options. Enables SSL/TLS support.
+
+        ca_certs : a string path to the Certificate Authority certificate files
+        that are to be treated as trusted by this client. If this is the only
+        option given then the client will operate in a similar manner to a web
+        browser. That is to say it will require the broker to have a
+        certificate signed by the Certificate Authorities in ca_certs and will
+        communicate using TLS v1,2, but will not attempt any form of
+        authentication. This provides basic network encryption but may not be
+        sufficient depending on how the broker is configured.
+        By default, on Python 2.7.9+ or 3.4+, the default certification
+        authority of the system is used. On older Python version this parameter
+        is mandatory.
+
+        certfile and keyfile are strings pointing to the PEM encoded client
+        certificate and private keys respectively. If these arguments are not
+        None then they will be used as client information for TLS based
+        authentication.  Support for this feature is broker dependent. Note
+        that if either of these files in encrypted and needs a password to
+        decrypt it, then this can be passed using the keyfile_password
+        argument - you should take precautions to ensure that your password is
+        not hard coded into your program by loading the password from a file
+        for example. If you do not provide keyfile_password, the password will
+        be requested to be typed in at a terminal window.
+
+        cert_reqs allows the certificate requirements that the client imposes
+        on the broker to be changed. By default this is ssl.CERT_REQUIRED,
+        which means that the broker must provide a certificate. See the ssl
+        pydoc for more information on this parameter.
+
+        tls_version allows the version of the SSL/TLS protocol used to be
+        specified. By default TLS v1.2 is used. Previous versions are allowed
+        but not recommended due to possible security problems.
+
+        ciphers is a string specifying which encryption ciphers are allowable
+        for this connection, or None to use the defaults. See the ssl pydoc for
+        more information.
+
+        Must be called before connect() or connect_async()."""
+        if ssl is None:
+            raise ValueError("This platform has no SSL/TLS.")
+        else:
+            if not hasattr(ssl, "SSLContext"):
+                raise ValueError("Python 2.7.9 and 3.2 are the minimum supported versions for TLS.")
+            elif ca_certs is None:
+                if not hasattr(ssl.SSLContext, "load_default_certs"):
+                    raise ValueError("ca_certs must not be None.")
+            if tls_version is None:
+                tls_version = ssl.PROTOCOL_TLSv1_2
+                if hasattr(ssl, "PROTOCOL_TLS"):
+                    tls_version = ssl.PROTOCOL_TLS
+            context = ssl.SSLContext(tls_version)
+            if certfile is not None:
+                context.load_cert_chain(certfile, keyfile, keyfile_password)
+            if cert_reqs == ssl.CERT_NONE:
+                if hasattr(context, "check_hostname"):
+                    context.check_hostname = False
+                else:
+                    context.verify_mode = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs
+                    if ca_certs is not None:
+                        context.load_verify_locations(ca_certs)
+                    else:
+                        context.load_default_certs()
+                if ciphers is not None:
+                    context.set_ciphers(ciphers)
+                self.tls_set_context(context)
+                if cert_reqs != ssl.CERT_NONE:
+                    self.tls_insecure_set(False)
+            else:
+                self.tls_insecure_set(True)
+
+    def tls_insecure_set(self, value):
+        """Configure verification of the server hostname in the server certificate.
+
+        If value is set to true, it is impossible to guarantee that the host
+        you are connecting to is not impersonating your server. This can be
+        useful in initial server testing, but makes it possible for a malicious
+        third party to impersonate your server through DNS spoofing, for
+        example.
+
+        Do not use this function in a real system. Setting value to true means
+        there is no point using encryption.
+
+        Must be called before connect() and after either tls_set() or
+        tls_set_context()."""
+        if self._ssl_context is None:
+            raise ValueError("Must configure SSL context before using tls_insecure_set.")
+        self._tls_insecure = value
+        if hasattr(self._ssl_context, "check_hostname"):
+            self._ssl_context.check_hostname = not value
+
+    def proxy_set(self, **proxy_args):
+        """Configure proxying of MQTT connection. Enables support for SOCKS or
+        HTTP proxies.
+
+        Proxying is done through the PySocks library. Brief descriptions of the
+        proxy_args parameters are below; see the PySocks docs for more info.
+
+        (Required)
+        proxy_type: One of {socks.HTTP, socks.SOCKS4, or socks.SOCKS5}
+        proxy_addr: IP address or DNS name of proxy server
+
+        (Optional)
+        proxy_rdns: boolean indicating whether proxy lookup should be performed
+            remotely (True, default) or locally (False)
+        proxy_username: username for SOCKS5 proxy, or userid for SOCKS4 proxy
+        proxy_password: password for SOCKS5 proxy
+
+        Must be called before connect() or connect_async()."""
+        if socks is None:
+            raise ValueError("PySocks must be installed for proxy support.")
+        else:
+            if not self._proxy_is_valid(proxy_args):
+                raise ValueError("proxy_type and/or proxy_addr are invalid.")
+            else:
+                self._proxy = proxy_args
+
+    def enable_logger(self, logger=None):
+        """ Enables a logger to send log messages to """
+        if logger is None:
+            if self._logger is not None:
+                return
+            logger = logging.getLogger(__name__)
+        self._logger = logger
+
+    def disable_logger(self):
+        self._logger = None
+
+    def connect(self, host, port=1883, keepalive=60, bind_address="", bind_port=0, clean_start=MQTT_CLEAN_START_FIRST_ONLY, properties=None):
+        """Connect to a remote broker.
+
+        host is the hostname or IP address of the remote broker.
+        port is the network port of the server host to connect to. Defaults to
+        1883. Note that the default port for MQTT over SSL/TLS is 8883 so if you
+        are using tls_set() the port may need providing.
+        keepalive: Maximum period in seconds between communications with the
+        broker. If no other messages are being exchanged, this controls the
+        rate at which the client will send ping messages to the broker.
+        clean_start: (MQTT v5.0 only) True, False or MQTT_CLEAN_START_FIRST_ONLY.
+        Sets the MQTT v5.0 clean_start flag always, never or on the first successful connect only,
+        respectively.  MQTT session data (such as outstanding messages and subscriptions)
+        is cleared on successful connect when the clean_start flag is set.
+        properties: (MQTT v5.0 only) the MQTT v5.0 properties to be sent in the
+        MQTT connect packet.
+        """
+        if self._protocol == MQTTv5:
+            self._mqttv5_first_connect = True
+        else:
+            if clean_start != MQTT_CLEAN_START_FIRST_ONLY:
+                raise ValueError("Clean start only applies to MQTT V5")
+            if properties != None:
+                raise ValueError("Properties only apply to MQTT V5")
+        self.connect_async(host, port, keepalive, bind_address, bind_port, clean_start, properties)
+        return self.reconnect()
+
+    def connect_srv(self, domain=None, keepalive=60, bind_address="", clean_start=MQTT_CLEAN_START_FIRST_ONLY, properties=None):
+        """Connect to a remote broker.
+
+        domain is the DNS domain to search for SRV records; if None,
+        try to determine local domain name.
+        keepalive, bind_address, clean_start and properties are as for connect()
+        """
+        if HAVE_DNS is False:
+            raise ValueError('No DNS resolver library found, try "pip install dnspython" or "pip3 install dnspython3".')
+        elif domain is None:
+            domain = socket.getfqdn()
+            domain = domain[(domain.find(".") + 1)[:None]]
+        try:
+            rr = "_mqtt._tcp.%s" % domain
+            if self._ssl:
+                rr = "_secure-mqtt._tcp.%s" % domain
+            answers = []
+            for answer in dns.resolver.query(rr, dns.rdatatype.SRV):
+                addr = answer.target.to_text()[None[:-1]]
+                answers.append((
+                 addr, answer.port, answer.priority, answer.weight))
+
+        except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer, dns.resolver.NoNameservers):
+            raise ValueError("No answer/NXDOMAIN for SRV in %s" % domain)
+
+        for answer in answers:
+            host, port, prio, weight = answer
+            try:
+                return self.connect(host, port, keepalive, bind_address, clean_start, properties)
+            except Exception:
+                pass
+
+        raise ValueError("No SRV hosts responded")
+
+    def connect_async(self, host, port=1883, keepalive=60, bind_address="", bind_port=0, clean_start=MQTT_CLEAN_START_FIRST_ONLY, properties=None):
+        """Connect to a remote broker asynchronously. This is a non-blocking
+        connect call that can be used with loop_start() to provide very quick
+        start.
+
+        host is the hostname or IP address of the remote broker.
+        port is the network port of the server host to connect to. Defaults to
+        1883. Note that the default port for MQTT over SSL/TLS is 8883 so if you
+        are using tls_set() the port may need providing.
+        keepalive: Maximum period in seconds between communications with the
+        broker. If no other messages are being exchanged, this controls the
+        rate at which the client will send ping messages to the broker.
+        clean_start: (MQTT v5.0 only) True, False or MQTT_CLEAN_START_FIRST_ONLY.
+        Sets the MQTT v5.0 clean_start flag always, never or on the first successful connect only,
+        respectively.  MQTT session data (such as outstanding messages and subscriptions)
+        is cleared on successful connect when the clean_start flag is set.
+        properties: (MQTT v5.0 only) the MQTT v5.0 properties to be sent in the
+        MQTT connect packet.  Use the Properties class.
+        """
+        if not host is None:
+            if len(host) == 0:
+                raise ValueError("Invalid host.")
+            if port <= 0:
+                raise ValueError("Invalid port number.")
+            if keepalive < 0:
+                raise ValueError("Keepalive must be >=0.")
+        elif bind_address != "":
+            if bind_address is not None and not sys.version_info < (2, 7):
+                if (3, 0) < sys.version_info < (3, 2):
+                    raise ValueError("bind_address requires Python 2.7 or 3.2.")
+        if bind_port < 0:
+            raise ValueError("Invalid bind port number.")
+        self._host = host
+        self._port = port
+        self._keepalive = keepalive
+        self._bind_address = bind_address
+        self._bind_port = bind_port
+        self._clean_start = clean_start
+        self._connect_properties = properties
+        self._state = mqtt_cs_connect_async
+
+    def reconnect_delay_set(self, min_delay=1, max_delay=120):
+        """ Configure the exponential reconnect delay
+
+            When connection is lost, wait initially min_delay seconds and
+            double this time every attempt. The wait is capped at max_delay.
+            Once the client is fully connected (e.g. not only TCP socket, but
+            received a success CONNACK), the wait timer is reset to min_delay.
+        """
+        with self._reconnect_delay_mutex:
+            self._reconnect_min_delay = min_delay
+            self._reconnect_max_delay = max_delay
+            self._reconnect_delay = None
+
+    def reconnect(self):
+        """Reconnect the client after a disconnect. Can only be called after
+        connect()/connect_async()."""
+        if len(self._host) == 0:
+            raise ValueError("Invalid host.")
+        elif self._port <= 0:
+            raise ValueError("Invalid port number.")
+        self._in_packet = {'command':0, 
+         'have_remaining':0, 
+         'remaining_count':[],  'remaining_mult':1, 
+         'remaining_length':0, 
+         'packet':bytearray(b''), 
+         'to_process':0, 
+         'pos':0}
+        self._out_packet = collections.deque()
+        with self._msgtime_mutex:
+            self._last_msg_in = time_func()
+            self._last_msg_out = time_func()
+        self._ping_t = 0
+        self._state = mqtt_cs_new
+        self._sock_close()
+        self._messages_reconnect_reset()
+        sock = self._create_socket_connection()
+        if self._ssl:
+            verify_host = not self._tls_insecure
+            try:
+                sock = self._ssl_context.wrap_socket(sock,
+                  server_hostname=(self._host),
+                  do_handshake_on_connect=False)
+            except ssl.CertificateError:
+                raise
+            except ValueError:
+                sock = self._ssl_context.wrap_socket(sock,
+                  do_handshake_on_connect=False)
+            else:
+                if hasattr(self._ssl_context, "check_hostname"):
+                    if self._ssl_context.check_hostname:
+                        verify_host = False
+                sock.settimeout(self._keepalive)
+                sock.do_handshake()
+                if verify_host:
+                    ssl.match_hostname(sock.getpeercert(), self._host)
+        if self._transport == "websockets":
+            sock.settimeout(self._keepalive)
+            sock = WebsocketWrapper(sock, self._host, self._port, self._ssl, self._websocket_path, self._websocket_extra_headers)
+        self._sock = sock
+        self._sock.setblocking(0)
+        self._registered_write = False
+        self._call_socket_open()
+        return self._send_connect(self._keepalive)
+
+    def loop(self, timeout=1.0, max_packets=1):
+        """Process network events.
+
+        It is strongly recommended that you use loop_start(), or
+        loop_forever(), or if you are using an external event loop using
+        loop_read(), loop_write(), and loop_misc(). Using loop() on it's own is
+        no longer recommended.
+
+        This function must be called regularly to ensure communication with the
+        broker is carried out. It calls select() on the network socket to wait
+        for network events. If incoming data is present it will then be
+        processed. Outgoing commands, from e.g. publish(), are normally sent
+        immediately that their function is called, but this is not always
+        possible. loop() will also attempt to send any remaining outgoing
+        messages, which also includes commands that are part of the flow for
+        messages with QoS>0.
+
+        timeout: The time in seconds to wait for incoming/outgoing network
+            traffic before timing out and returning.
+        max_packets: Not currently used.
+
+        Returns MQTT_ERR_SUCCESS on success.
+        Returns >0 on error.
+
+        A ValueError will be raised if timeout < 0"""
+        if self._sockpairR is None or self._sockpairW is None:
+            self._reset_sockets(sockpair_only=True)
+            self._sockpairR, self._sockpairW = _socketpair_compat()
+        return self._loop(timeout)
+
+    def _loop(self, timeout=1.0):
+        if timeout < 0.0:
+            raise ValueError("Invalid timeout.")
+        else:
+            try:
+                packet = self._out_packet.popleft()
+                self._out_packet.appendleft(packet)
+                wlist = [self._sock]
+            except IndexError:
+                wlist = []
+
+            pending_bytes = 0
+            if hasattr(self._sock, "pending"):
+                pending_bytes = self._sock.pending()
+            if pending_bytes > 0:
+                timeout = 0.0
+            if self._sockpairR is None:
+                rlist = [
+                 self._sock]
+            else:
+                rlist = [
+                 self._sock, self._sockpairR]
+        try:
+            socklist = select.select(rlist, wlist, [], timeout)
+        except TypeError:
+            return MQTT_ERR_CONN_LOST
+        except ValueError:
+            return MQTT_ERR_CONN_LOST
+        except Exception:
+            return MQTT_ERR_UNKNOWN
+        else:
+            if self._sock in socklist[0] or pending_bytes > 0:
+                rc = self.loop_read()
+                if not rc:
+                    if self._sock is None:
+                        return rc
+            elif self._sockpairR:
+                if self._sockpairR in socklist[0]:
+                    socklist[1].insert(0, self._sock)
+                    try:
+                        self._sockpairR.recv(10000)
+                    except BlockingIOError:
+                        pass
+
+            if self._sock in socklist[1]:
+                rc = self.loop_write()
+                if rc or self._sock is None:
+                    return rc
+            return self.loop_misc()
+
+    def publish(self, topic, payload=None, qos=0, retain=False, properties=None):
+        """Publish a message on a topic.
+
+        This causes a message to be sent to the broker and subsequently from
+        the broker to any clients subscribing to matching topics.
+
+        topic: The topic that the message should be published on.
+        payload: The actual message to send. If not given, or set to None a
+        zero length message will be used. Passing an int or float will result
+        in the payload being converted to a string representing that number. If
+        you wish to send a true int/float, use struct.pack() to create the
+        payload you require.
+        qos: The quality of service level to use.
+        retain: If set to true, the message will be set as the "last known
+        good"/retained message for the topic.
+        properties: (MQTT v5.0 only) the MQTT v5.0 properties to be included.
+        Use the Properties class.
+
+        Returns a MQTTMessageInfo class, which can be used to determine whether
+        the message has been delivered (using info.is_published()) or to block
+        waiting for the message to be delivered (info.wait_for_publish()). The
+        message ID and return code of the publish() call can be found at
+        info.mid and info.rc.
+
+        For backwards compatibility, the MQTTMessageInfo class is iterable so
+        the old construct of (rc, mid) = client.publish(...) is still valid.
+
+        rc is MQTT_ERR_SUCCESS to indicate success or MQTT_ERR_NO_CONN if the
+        client is not currently connected.  mid is the message ID for the
+        publish request. The mid value can be used to track the publish request
+        by checking against the mid argument in the on_publish() callback if it
+        is defined.
+
+        A ValueError will be raised if topic is None, has zero length or is
+        invalid (contains a wildcard), except if the MQTT version used is v5.0.
+        For v5.0, a zero length topic can be used when a Topic Alias has been set.
+
+        A ValueError will be raised if qos is not one of 0, 1 or 2, or if
+        the length of the payload is greater than 268435455 bytes."""
+        if self._protocol != MQTTv5 and not topic is None:
+            if len(topic) == 0:
+                raise ValueError("Invalid topic.")
+        else:
+            topic = topic.encode("utf-8")
+            if self._topic_wildcard_len_check(topic) != MQTT_ERR_SUCCESS:
+                raise ValueError("Publish topic cannot contain wildcards.")
+            elif not qos < 0:
+                if qos > 2:
+                    raise ValueError("Invalid QoS level.")
+                if isinstance(payload, unicode):
+                    local_payload = payload.encode("utf-8")
+            elif isinstance(payload, (bytes, bytearray)):
+                local_payload = payload
+            else:
+                if isinstance(payload, (int, float)):
+                    local_payload = str(payload).encode("ascii")
+                else:
+                    if payload is None:
+                        local_payload = b''
+                    else:
+                        raise TypeError("payload must be a string, bytearray, int, float or None.")
+        if len(local_payload) > 268435455:
+            raise ValueError("Payload too large.")
+        local_mid = self._mid_generate()
+        if qos == 0:
+            info = MQTTMessageInfo(local_mid)
+            rc = self._send_publish(local_mid, topic, local_payload, qos, retain, False, info, properties)
+            info.rc = rc
+            return info
+        message = MQTTMessage(local_mid, topic)
+        message.timestamp = time_func()
+        message.payload = local_payload
+        message.qos = qos
+        message.retain = retain
+        message.dup = False
+        message.properties = properties
+        with self._out_message_mutex:
+            if self._max_queued_messages > 0:
+                if len(self._out_messages) >= self._max_queued_messages:
+                    message.info.rc = MQTT_ERR_QUEUE_SIZE
+                    return message.info
+            else:
+                if local_mid in self._out_messages:
+                    message.info.rc = MQTT_ERR_QUEUE_SIZE
+                    return message.info
+                    self._out_messages[message.mid] = message
+                    if self._max_inflight_messages == 0 or self._inflight_messages < self._max_inflight_messages:
+                        self._inflight_messages += 1
+                        if qos == 1:
+                            message.state = mqtt_ms_wait_for_puback
+                elif qos == 2:
+                    message.state = mqtt_ms_wait_for_pubrec
+                rc = self._send_publish(message.mid, topic, message.payload, message.qos, message.retain, message.dup, message.info, message.properties)
+                if rc is MQTT_ERR_NO_CONN:
+                    self._inflight_messages -= 1
+                    message.state = mqtt_ms_publish
+                message.info.rc = rc
+                return message.info
+            message.state = mqtt_ms_queued
+            message.info.rc = MQTT_ERR_SUCCESS
+            return message.info
+
+    def username_pw_set(self, username, password=None):
+        """Set a username and optionally a password for broker authentication.
+
+        Must be called before connect() to have any effect.
+        Requires a broker that supports MQTT v3.1.
+
+        username: The username to authenticate with. Need have no relationship to the client id. Must be unicode
+            [MQTT-3.1.3-11].
+            Set to None to reset client back to not using username/password for broker authentication.
+        password: The password to authenticate with. Optional, set to None if not required. If it is unicode, then it
+            will be encoded as UTF-8.
+        """
+        self._username = None if username is None else username.encode("utf-8")
+        self._password = password
+        if isinstance(self._password, unicode):
+            self._password = self._password.encode("utf-8")
+
    def enable_bridge_mode(self):
        """Sets the client in a bridge mode instead of client mode.

        Must be called before connect() to have any effect.
        Requires brokers that support bridge mode.

        Under bridge mode, the broker will identify the client as a bridge and
        not send its own messages back to it. Hence a subscription of # is
        possible without message loops. This feature also correctly propagates
        the retain flag on the messages.

        Currently Mosquitto and RSMB support this feature. This feature can
        be used to create a bridge between multiple brokers.
        """
        self._client_mode = MQTT_BRIDGE
+
    def is_connected(self):
        """Return the current status of the connection.

        True if a connection exists (a CONNACK has been processed);
        False if the connection is closed or still being established.
        """
        return self._state == mqtt_cs_connected
+
    def disconnect(self, reasoncode=None, properties=None):
        """Disconnect a connected client from the broker.

        reasoncode: (MQTT v5.0 only) a ReasonCodes instance setting the MQTT v5.0
        reasoncode to be sent with the disconnect.  It is optional, the receiver
        then assuming that 0 (success) is the value.
        properties: (MQTT v5.0 only) a Properties instance setting the MQTT v5.0 properties
        to be included. Optional - if not set, no properties are sent.

        Returns MQTT_ERR_NO_CONN when there is no open socket; otherwise the
        result of sending the DISCONNECT packet.
        """
        # Record the intent first so subsequent socket teardown is reported
        # as a clean shutdown rather than a connection loss.
        self._state = mqtt_cs_disconnecting
        if self._sock is None:
            return MQTT_ERR_NO_CONN
        return self._send_disconnect(reasoncode, properties)
+
+    def subscribeParse error at or near `JUMP_BACK' instruction at offset 334_336
+
+    def unsubscribe(self, topic, properties=None):
+        """Unsubscribe the client from one or more topics.
+
+        topic: A single string, or list of strings that are the subscription
+               topics to unsubscribe from.
+        properties: (MQTT v5.0 only) a Properties instance setting the MQTT v5.0 properties
+        to be included. Optional - if not set, no properties are sent.
+
+        Returns a tuple (result, mid), where result is MQTT_ERR_SUCCESS
+        to indicate success or (MQTT_ERR_NO_CONN, None) if the client is not
+        currently connected.
+        mid is the message ID for the unsubscribe request. The mid value can be
+        used to track the unsubscribe request by checking against the mid
+        argument in the on_unsubscribe() callback if it is defined.
+
+        Raises a ValueError if topic is None or has zero string length, or is
+        not a string or list.
+        """
+        topic_list = None
+        if topic is None:
+            raise ValueError("Invalid topic.")
+        elif isinstance(topic, basestring):
+            if len(topic) == 0:
+                raise ValueError("Invalid topic.")
+            topic_list = [
+             topic.encode("utf-8")]
+        else:
+            if isinstance(topic, list):
+                topic_list = []
+                for t in topic:
+                    if not len(t) == 0:
+                        if not isinstance(t, basestring):
+                            raise ValueError("Invalid topic.")
+                        topic_list.append(t.encode("utf-8"))
+
+        if topic_list is None:
+            raise ValueError("No topic specified, or incorrect topic type.")
+        if self._sock is None:
+            return (
+             MQTT_ERR_NO_CONN, None)
+        return self._send_unsubscribe(False, topic_list, properties)
+
+    def loop_read(self, max_packets=1):
+        """Process read network events. Use in place of calling loop() if you
+        wish to handle your client reads as part of your own application.
+
+        Use socket() to obtain the client socket to call select() or equivalent
+        on.
+
+        Do not use if you are using the threaded interface loop_start()."""
+        if self._sock is None:
+            return MQTT_ERR_NO_CONN
+        max_packets = len(self._out_messages) + len(self._in_messages)
+        if max_packets < 1:
+            max_packets = 1
+        for _ in range(0, max_packets):
+            if self._sock is None:
+                return MQTT_ERR_NO_CONN
+                rc = self._packet_read()
+                if rc > 0:
+                    return self._loop_rc_handle(rc)
+                if rc == MQTT_ERR_AGAIN:
+                    return MQTT_ERR_SUCCESS
+
+        return MQTT_ERR_SUCCESS
+
+    def loop_write(self, max_packets=1):
+        """Process write network events. Use in place of calling loop() if you
+        wish to handle your client writes as part of your own application.
+
+        Use socket() to obtain the client socket to call select() or equivalent
+        on.
+
+        Use want_write() to determine if there is data waiting to be written.
+
+        Do not use if you are using the threaded interface loop_start()."""
+        if self._sock is None:
+            return MQTT_ERR_NO_CONN
+        try:
+            rc = self._packet_write()
+            if rc == MQTT_ERR_AGAIN:
+                return MQTT_ERR_SUCCESS
+            if rc > 0:
+                return self._loop_rc_handle(rc)
+            return MQTT_ERR_SUCCESS
+        finally:
+            if self.want_write():
+                self._call_socket_register_write()
+            else:
+                self._call_socket_unregister_write()
+
+    def want_write(self):
+        """Call to determine if there is network data waiting to be written.
+        Useful if you are calling select() yourself rather than using loop().
+        """
+        try:
+            packet = self._out_packet.popleft()
+            self._out_packet.appendleft(packet)
+            return True
+        except IndexError:
+            return False
+
+    def loop_misc(self):
+        """Process miscellaneous network events. Use in place of calling loop() if you
+        wish to call select() or equivalent on.
+
+        Do not use if you are using the threaded interface loop_start()."""
+        if self._sock is None:
+            return MQTT_ERR_NO_CONN
+        now = time_func()
+        self._check_keepalive()
+        if self._ping_t > 0:
+            if now - self._ping_t >= self._keepalive:
+                self._sock_close()
+                if self._state == mqtt_cs_disconnecting:
+                    rc = MQTT_ERR_SUCCESS
+                else:
+                    rc = MQTT_ERR_KEEPALIVE
+                self._do_on_disconnect(rc)
+                return MQTT_ERR_CONN_LOST
+        return MQTT_ERR_SUCCESS
+
+    def max_inflight_messages_set(self, inflight):
+        """Set the maximum number of messages with QoS>0 that can be part way
+        through their network flow at once. Defaults to 20."""
+        if inflight < 0:
+            raise ValueError("Invalid inflight.")
+        self._max_inflight_messages = inflight
+
+    def max_queued_messages_set(self, queue_size):
+        """Set the maximum number of messages in the outgoing message queue.
+        0 means unlimited."""
+        if queue_size < 0:
+            raise ValueError("Invalid queue size.")
+        if not isinstance(queue_size, int):
+            raise ValueError("Invalid type of queue size.")
+        self._max_queued_messages = queue_size
+        return self
+
    def message_retry_set(self, retry):
        """No longer used; kept only for backward compatibility. Remove in version 2.0."""
        pass
+
    def user_data_set(self, userdata):
        """Set the user data variable passed to callbacks. May be any data type."""
        # Stored verbatim; handed back unchanged as the `userdata` argument
        # of every registered callback.
        self._userdata = userdata
+
+    def will_set(self, topic, payload=None, qos=0, retain=False, properties=None):
+        """Set a Will to be sent by the broker in case the client disconnects unexpectedly.
+
+        This must be called before connect() to have any effect.
+
+        topic: The topic that the will message should be published on.
+        payload: The message to send as a will. If not given, or set to None a
+        zero length message will be used as the will. Passing an int or float
+        will result in the payload being converted to a string representing
+        that number. If you wish to send a true int/float, use struct.pack() to
+        create the payload you require.
+        qos: The quality of service level to use for the will.
+        retain: If set to true, the will message will be set as the "last known
+        good"/retained message for the topic.
+        properties: (MQTT v5.0 only) a Properties instance setting the MQTT v5.0 properties
+        to be included with the will message. Optional - if not set, no properties are sent.
+
+        Raises a ValueError if qos is not 0, 1 or 2, or if topic is None or has
+        zero string length.
+        """
+        if not topic is None:
+            if len(topic) == 0:
+                raise ValueError("Invalid topic.")
+            if qos < 0 or qos > 2:
+                raise ValueError("Invalid QoS level.")
+        elif properties != None:
+            if not isinstance(properties, Properties):
+                raise ValueError("The properties argument must be an instance of the Properties class.")
+        elif isinstance(payload, unicode):
+            self._will_payload = payload.encode("utf-8")
+        else:
+            if isinstance(payload, (bytes, bytearray)):
+                self._will_payload = payload
+            else:
+                if isinstance(payload, (int, float)):
+                    self._will_payload = str(payload).encode("ascii")
+                else:
+                    if payload is None:
+                        self._will_payload = b''
+                    else:
+                        raise TypeError("payload must be a string, bytearray, int, float or None.")
+        self._will = True
+        self._will_topic = topic.encode("utf-8")
+        self._will_qos = qos
+        self._will_retain = retain
+        self._will_properties = properties
+
+    def will_clear(self):
+        """ Removes a will that was previously configured with will_set().
+
+        Must be called before connect() to have any effect."""
+        self._will = False
+        self._will_topic = b''
+        self._will_payload = b''
+        self._will_qos = 0
+        self._will_retain = False
+
+    def socket(self):
+        """Return the socket or ssl object for this client."""
+        return self._sock
+
+    def loop_forever(self, timeout=1.0, max_packets=1, retry_first_connection=False):
+        """This function calls the network loop functions for you in an
+        infinite blocking loop. It is useful for the case where you only want
+        to run the MQTT client loop in your program.
+
+        loop_forever() will handle reconnecting for you if reconnect_on_failure is
+        true (this is the default behavior). If you call disconnect() in a callback
+        it will return.
+
+        timeout: The time in seconds to wait for incoming/outgoing network
+          traffic before timing out and returning.
+        max_packets: Not currently used.
+        retry_first_connection: Should the first connection attempt be retried on failure.
+          This is independent of the reconnect_on_failure setting.
+
+        Raises OSError/WebsocketConnectionError on first connection failures unless retry_first_connection=True
+        """
+        run = True
+        while run:
+            if self._thread_terminate is True:
+                break
+            if self._state == mqtt_cs_connect_async:
+                try:
+                    self.reconnect()
+                except (OSError, WebsocketConnectionError):
+                    self._handle_on_connect_fail()
+                    if not retry_first_connection:
+                        raise
+                    self._easy_log(MQTT_LOG_DEBUG, "Connection failed, retrying")
+                    self._reconnect_wait()
+
+            else:
+                break
+
+        while run:
+            rc = MQTT_ERR_SUCCESS
+            while rc == MQTT_ERR_SUCCESS:
+                rc = self._loop(timeout)
+                if self._thread_terminate is True and len(self._out_packet) == 0 and len(self._out_messages) == 0:
+                    rc = 1
+                    run = False
+
+            def should_exit():
+                return self._state == mqtt_cs_disconnecting or run is False or self._thread_terminate is True
+
+            if not should_exit():
+                run = self._reconnect_on_failure or False
+            else:
+                self._reconnect_wait()
+                if should_exit():
+                    run = False
+                else:
+                    try:
+                        self.reconnect()
+                    except (OSError, WebsocketConnectionError):
+                        self._handle_on_connect_fail()
+                        self._easy_log(MQTT_LOG_DEBUG, "Connection failed, retrying")
+
+        return rc
+
+    def loop_start(self):
+        """This is part of the threaded client interface. Call this once to
+        start a new thread to process network traffic. This provides an
+        alternative to repeatedly calling loop() yourself.
+        """
+        if self._thread is not None:
+            return MQTT_ERR_INVAL
+        self._sockpairR, self._sockpairW = _socketpair_compat()
+        self._thread_terminate = False
+        self._thread = threading.Thread(target=(self._thread_main))
+        self._thread.daemon = True
+        self._thread.start()
+
+    def loop_stop(self, force=False):
+        """This is part of the threaded client interface. Call this once to
+        stop the network thread previously created with loop_start(). This call
+        will block until the network thread finishes.
+
+        The force parameter is currently ignored.
+        """
+        if self._thread is None:
+            return MQTT_ERR_INVAL
+        self._thread_terminate = True
+        if threading.current_thread() != self._thread:
+            self._thread.join()
+            self._thread = None
+
+    @property
+    def on_log(self):
+        """If implemented, called when the client has log information.
+        Defined to allow debugging."""
+        return self._on_log
+
+    @on_log.setter
+    def on_log(self, func):
+        """ Define the logging callback implementation.
+
+        Expected signature is:
+            log_callback(client, userdata, level, buf)
+
+        client:     the client instance for this callback
+        userdata:   the private user data as set in Client() or userdata_set()
+        level:      gives the severity of the message and will be one of
+                    MQTT_LOG_INFO, MQTT_LOG_NOTICE, MQTT_LOG_WARNING,
+                    MQTT_LOG_ERR, and MQTT_LOG_DEBUG.
+        buf:        the message itself
+
+        Decorator: @client.log_callback() (```client``` is the name of the
+            instance which this callback is being attached to)
+        """
+        self._on_log = func
+
+    def log_callback(self):
+
+        def decorator(func):
+            self.on_log = func
+            return func
+
+        return decorator
+
+    @property
+    def on_connect(self):
+        """If implemented, called when the broker responds to our connection
+        request."""
+        return self._on_connect
+
+    @on_connect.setter
+    def on_connect(self, func):
+        """ Define the connect callback implementation.
+
+        Expected signature for MQTT v3.1 and v3.1.1 is:
+            connect_callback(client, userdata, flags, rc)
+
+        and for MQTT v5.0:
+            connect_callback(client, userdata, flags, reasonCode, properties)
+
+        client:     the client instance for this callback
+        userdata:   the private user data as set in Client() or userdata_set()
+        flags:      response flags sent by the broker
+        rc:         the connection result
+        reasonCode: the MQTT v5.0 reason code: an instance of the ReasonCode class.
+                    ReasonCode may be compared to integer.
+        properties: the MQTT v5.0 properties returned from the broker.  An instance
+                    of the Properties class.
+                    For MQTT v3.1 and v3.1.1 properties is not provided but for compatibility
+                    with MQTT v5.0, we recommend adding properties=None.
+
+        flags is a dict that contains response flags from the broker:
+            flags['session present'] - this flag is useful for clients that are
+                using clean session set to 0 only. If a client with clean
+                session=0, that reconnects to a broker that it has previously
+                connected to, this flag indicates whether the broker still has the
+                session information for the client. If 1, the session still exists.
+
+        The value of rc indicates success or not:
+            0: Connection successful
+            1: Connection refused - incorrect protocol version
+            2: Connection refused - invalid client identifier
+            3: Connection refused - server unavailable
+            4: Connection refused - bad username or password
+            5: Connection refused - not authorised
+            6-255: Currently unused.
+
+        Decorator: @client.connect_callback() (```client``` is the name of the
+            instance which this callback is being attached to)
+
+        """
+        with self._callback_mutex:
+            self._on_connect = func
+
+    def connect_callback(self):
+
+        def decorator(func):
+            self.on_connect = func
+            return func
+
+        return decorator
+
+    @property
+    def on_connect_fail(self):
+        """If implemented, called when the client failed to connect
+        to the broker."""
+        return self._on_connect_fail
+
+    @on_connect_fail.setter
+    def on_connect_fail(self, func):
+        """ Define the connection failure callback implementation
+
+        Expected signature is:
+            on_connect_fail(client, userdata)
+
+        client:     the client instance for this callback
+        userdata:   the private user data as set in Client() or userdata_set()
+
+        Decorator: @client.connect_fail_callback() (```client``` is the name of the
+            instance which this callback is being attached to)
+
+        """
+        with self._callback_mutex:
+            self._on_connect_fail = func
+
+    def connect_fail_callback(self):
+
+        def decorator(func):
+            self.on_connect_fail = func
+            return func
+
+        return decorator
+
+    @property
+    def on_subscribe(self):
+        """If implemented, called when the broker responds to a subscribe
+        request."""
+        return self._on_subscribe
+
+    @on_subscribe.setter
+    def on_subscribe(self, func):
+        """ Define the subscribe callback implementation.
+
+        Expected signature for MQTT v3.1.1 and v3.1 is:
+            subscribe_callback(client, userdata, mid, granted_qos)
+
+        and for MQTT v5.0:
+            subscribe_callback(client, userdata, mid, reasonCodes, properties)
+
+        client:         the client instance for this callback
+        userdata:       the private user data as set in Client() or userdata_set()
+        mid:            matches the mid variable returned from the corresponding
+                        subscribe() call.
+        granted_qos:    list of integers that give the QoS level the broker has
+                        granted for each of the different subscription requests.
+        reasonCodes:    the MQTT v5.0 reason codes received from the broker for each
+                        subscription.  A list of ReasonCodes instances.
+        properties:     the MQTT v5.0 properties received from the broker.  A
+                        list of Properties class instances.
+
+        Decorator: @client.subscribe_callback() (```client``` is the name of the
+            instance which this callback is being attached to)
+        """
+        with self._callback_mutex:
+            self._on_subscribe = func
+
+    def subscribe_callback(self):
+
+        def decorator(func):
+            self.on_subscribe = func
+            return func
+
+        return decorator
+
+    @property
+    def on_message(self):
+        """If implemented, called when a message has been received on a topic
+        that the client subscribes to.
+
+        This callback will be called for every message received. Use
+        message_callback_add() to define multiple callbacks that will be called
+        for specific topic filters."""
+        return self._on_message
+
+    @on_message.setter
+    def on_message(self, func):
+        """ Define the message received callback implementation.
+
+        Expected signature is:
+            on_message_callback(client, userdata, message)
+
+        client:     the client instance for this callback
+        userdata:   the private user data as set in Client() or userdata_set()
+        message:    an instance of MQTTMessage.
+                    This is a class with members topic, payload, qos, retain.
+
+        Decorator: @client.message_callback() (```client``` is the name of the
+            instance which this callback is being attached to)
+
+        """
+        with self._callback_mutex:
+            self._on_message = func
+
+    def message_callback(self):
+
+        def decorator(func):
+            self.on_message = func
+            return func
+
+        return decorator
+
+    @property
+    def on_publish(self):
+        """If implemented, called when a message that was to be sent using the
+        publish() call has completed transmission to the broker.
+
+        For messages with QoS levels 1 and 2, this means that the appropriate
+        handshakes have completed. For QoS 0, this simply means that the message
+        has left the client.
+        This callback is important because even if the publish() call returns
+        success, it does not always mean that the message has been sent."""
+        return self._on_publish
+
+    @on_publish.setter
+    def on_publish(self, func):
+        """ Define the published message callback implementation.
+
+        Expected signature is:
+            on_publish_callback(client, userdata, mid)
+
+        client:     the client instance for this callback
+        userdata:   the private user data as set in Client() or userdata_set()
+        mid:        matches the mid variable returned from the corresponding
+                    publish() call, to allow outgoing messages to be tracked.
+
+        Decorator: @client.publish_callback() (```client``` is the name of the
+            instance which this callback is being attached to)
+
+        """
+        with self._callback_mutex:
+            self._on_publish = func
+
+    def publish_callback(self):
+
+        def decorator(func):
+            self.on_publish = func
+            return func
+
+        return decorator
+
+    @property
+    def on_unsubscribe(self):
+        """If implemented, called when the broker responds to an unsubscribe
+        request."""
+        return self._on_unsubscribe
+
+    @on_unsubscribe.setter
+    def on_unsubscribe(self, func):
+        """ Define the unsubscribe callback implementation.
+
+        Expected signature for MQTT v3.1.1 and v3.1 is:
+            unsubscribe_callback(client, userdata, mid)
+
+        and for MQTT v5.0:
+            unsubscribe_callback(client, userdata, mid, properties, reasonCodes)
+
+        client:         the client instance for this callback
+        userdata:       the private user data as set in Client() or userdata_set()
+        mid:            matches the mid variable returned from the corresponding
+                        unsubscribe() call.
+        properties:     the MQTT v5.0 properties received from the broker.  A
+                        list of Properties class instances.
+        reasonCodes:    the MQTT v5.0 reason codes received from the broker for each
+                        unsubscribe topic.  A list of ReasonCodes instances
+
+        Decorator: @client.unsubscribe_callback() (```client``` is the name of the
+            instance which this callback is being attached to)
+        """
+        with self._callback_mutex:
+            self._on_unsubscribe = func
+
+    def unsubscribe_callback(self):
+
+        def decorator(func):
+            self.on_unsubscribe = func
+            return func
+
+        return decorator
+
+    @property
+    def on_disconnect(self):
+        """If implemented, called when the client disconnects from the broker.
+        """
+        return self._on_disconnect
+
+    @on_disconnect.setter
+    def on_disconnect(self, func):
+        """ Define the disconnect callback implementation.
+
+        Expected signature for MQTT v3.1.1 and v3.1 is:
+            disconnect_callback(client, userdata, rc)
+
+        and for MQTT v5.0:
+            disconnect_callback(client, userdata, reasonCode, properties)
+
+        client:     the client instance for this callback
+        userdata:   the private user data as set in Client() or userdata_set()
+        rc:         the disconnection result
+                    The rc parameter indicates the disconnection state. If
+                    MQTT_ERR_SUCCESS (0), the callback was called in response to
+                    a disconnect() call. If any other value the disconnection
+                    was unexpected, such as might be caused by a network error.
+
+        Decorator: @client.disconnect_callback() (```client``` is the name of the
+            instance which this callback is being attached to)
+
+        """
+        with self._callback_mutex:
+            self._on_disconnect = func
+
+    def disconnect_callback(self):
+
+        def decorator(func):
+            self.on_disconnect = func
+            return func
+
+        return decorator
+
+    @property
+    def on_socket_open(self):
+        """If implemented, called just after the socket was opend."""
+        return self._on_socket_open
+
+    @on_socket_open.setter
+    def on_socket_open(self, func):
+        """Define the socket_open callback implementation.
+
+        This should be used to register the socket to an external event loop for reading.
+
+        Expected signature is:
+            socket_open_callback(client, userdata, socket)
+
+        client:     the client instance for this callback
+        userdata:   the private user data as set in Client() or userdata_set()
+        sock:       the socket which was just opened.
+
+        Decorator: @client.socket_open_callback() (```client``` is the name of the
+            instance which this callback is being attached to)
+        """
+        with self._callback_mutex:
+            self._on_socket_open = func
+
+    def socket_open_callback(self):
+
+        def decorator(func):
+            self.on_socket_open = func
+            return func
+
+        return decorator
+
+    def _call_socket_open(self):
+        """Call the socket_open callback with the just-opened socket"""
+        with self._callback_mutex:
+            on_socket_open = self.on_socket_open
+        if on_socket_open:
+            with self._in_callback_mutex:
+                try:
+                    on_socket_open(self, self._userdata, self._sock)
+                except Exception as err:
+                    try:
+                        self._easy_log(MQTT_LOG_ERR, "Caught exception in on_socket_open: %s", err)
+                        if not self.suppress_exceptions:
+                            raise
+                    finally:
+                        err = None
+                        del err
+
+    @property
+    def on_socket_close(self):
+        """If implemented, called just before the socket is closed."""
+        return self._on_socket_close
+
+    @on_socket_close.setter
+    def on_socket_close(self, func):
+        """Define the socket_close callback implementation.
+
+        This should be used to unregister the socket from an external event loop for reading.
+
+        Expected signature is:
+            socket_close_callback(client, userdata, socket)
+
+        client:     the client instance for this callback
+        userdata:   the private user data as set in Client() or userdata_set()
+        sock:       the socket which is about to be closed.
+
+        Decorator: @client.socket_close_callback() (```client``` is the name of the
+            instance which this callback is being attached to)
+        """
+        with self._callback_mutex:
+            self._on_socket_close = func
+
+    def socket_close_callback(self):
+
+        def decorator(func):
+            self.on_socket_close = func
+            return func
+
+        return decorator
+
+    def _call_socket_close(self, sock):
+        """Call the socket_close callback with the about-to-be-closed socket"""
+        with self._callback_mutex:
+            on_socket_close = self.on_socket_close
+        if on_socket_close:
+            with self._in_callback_mutex:
+                try:
+                    on_socket_close(self, self._userdata, sock)
+                except Exception as err:
+                    try:
+                        self._easy_log(MQTT_LOG_ERR, "Caught exception in on_socket_close: %s", err)
+                        if not self.suppress_exceptions:
+                            raise
+                    finally:
+                        err = None
+                        del err
+
+    @property
+    def on_socket_register_write(self):
+        """If implemented, called when the socket needs writing but can't."""
+        return self._on_socket_register_write
+
+    @on_socket_register_write.setter
+    def on_socket_register_write(self, func):
+        """Define the socket_register_write callback implementation.
+
+        This should be used to register the socket with an external event loop for writing.
+
+        Expected signature is:
+            socket_register_write_callback(client, userdata, socket)
+
+        client:     the client instance for this callback
+        userdata:   the private user data as set in Client() or userdata_set()
+        sock:       the socket which should be registered for writing
+
+        Decorator: @client.socket_register_write_callback() (```client``` is the name of the
+            instance which this callback is being attached to)
+        """
+        with self._callback_mutex:
+            self._on_socket_register_write = func
+
+    def socket_register_write_callback(self):
+
+        def decorator(func):
+            self._on_socket_register_write = func
+            return func
+
+        return decorator
+
+    def _call_socket_register_write(self):
+        """Call the socket_register_write callback with the unwritable socket"""
+        if not self._sock or self._registered_write:
+            return
+        self._registered_write = True
+        with self._callback_mutex:
+            on_socket_register_write = self.on_socket_register_write
+        if on_socket_register_write:
+            try:
+                on_socket_register_write(self, self._userdata, self._sock)
+            except Exception as err:
+                try:
+                    self._easy_log(MQTT_LOG_ERR, "Caught exception in on_socket_register_write: %s", err)
+                    if not self.suppress_exceptions:
+                        raise
+                finally:
+                    err = None
+                    del err
+
+    @property
+    def on_socket_unregister_write(self):
+        """If implemented, called when the socket doesn't need writing anymore."""
+        return self._on_socket_unregister_write
+
+    @on_socket_unregister_write.setter
+    def on_socket_unregister_write(self, func):
+        """Define the socket_unregister_write callback implementation.
+
+        This should be used to unregister the socket from an external event loop for writing.
+
+        Expected signature is:
+            socket_unregister_write_callback(client, userdata, socket)
+
+        client:     the client instance for this callback
+        userdata:   the private user data as set in Client() or userdata_set()
+        sock:       the socket which should be unregistered for writing
+
+        Decorator: @client.socket_unregister_write_callback() (```client``` is the name of the
+            instance which this callback is being attached to)
+        """
+        with self._callback_mutex:
+            self._on_socket_unregister_write = func
+
+    def socket_unregister_write_callback(self):
+
+        def decorator(func):
+            self._on_socket_unregister_write = func
+            return func
+
+        return decorator
+
+    def _call_socket_unregister_write(self, sock=None):
+        """Call the socket_unregister_write callback with the writable socket"""
+        sock = sock or self._sock
+        return sock and self._registered_write or None
+        self._registered_write = False
+        with self._callback_mutex:
+            on_socket_unregister_write = self.on_socket_unregister_write
+        if on_socket_unregister_write:
+            try:
+                on_socket_unregister_write(self, self._userdata, sock)
+            except Exception as err:
+                try:
+                    self._easy_log(MQTT_LOG_ERR, "Caught exception in on_socket_unregister_write: %s", err)
+                    if not self.suppress_exceptions:
+                        raise
+                finally:
+                    err = None
+                    del err
+
+    def message_callback_add(self, sub, callback):
+        """Register a message callback for a specific topic.
+        Messages that match 'sub' will be passed to 'callback'. Any
+        non-matching messages will be passed to the default on_message
+        callback.
+
+        Call multiple times with different 'sub' to define multiple topic
+        specific callbacks.
+
+        Topic specific callbacks may be removed with
+        message_callback_remove()."""
+        if callback is None or sub is None:
+            raise ValueError("sub and callback must both be defined.")
+        with self._callback_mutex:
+            self._on_message_filtered[sub] = callback
+
+    def topic_callback(self, sub):
+
+        def decorator(func):
+            self.message_callback_add(sub, func)
+            return func
+
+        return decorator
+
+    def message_callback_remove(self, sub):
+        """Remove a message callback previously registered with
+        message_callback_add()."""
+        if sub is None:
+            raise ValueError("sub must defined.")
+        with self._callback_mutex:
+            try:
+                del self._on_message_filtered[sub]
+            except KeyError:
+                pass
+
+    def _loop_rc_handle(self, rc, properties=None):
+        if rc:
+            self._sock_close()
+            if self._state == mqtt_cs_disconnecting:
+                rc = MQTT_ERR_SUCCESS
+            self._do_on_disconnect(rc, properties)
+        return rc
+
    def _packet_read(self):
        """Incrementally read a single MQTT packet from the socket.

        Drives the three-phase parser state kept in ``self._in_packet``:
        (1) fixed-header command byte, (2) variable-length "remaining
        length" field, (3) payload bytes.  Any phase may return
        MQTT_ERR_AGAIN when the socket would block; the partial state is
        preserved so the next call resumes where this one stopped.  Once a
        whole packet has arrived it is dispatched to ``_packet_handle()``
        and the parser state is reset.
        """
        # Phase 1: fetch the command byte if we do not have one yet.
        if self._in_packet["command"] == 0:
            try:
                command = self._sock_recv(1)
            except BlockingIOError:
                return MQTT_ERR_AGAIN
            except ConnectionError as err:
                # The try/finally err cleanup mirrors Python 3's automatic
                # 'except' name cleanup (decompiler artifact; harmless).
                try:
                    self._easy_log(MQTT_LOG_ERR, "failed to receive on socket: %s", err)
                    return MQTT_ERR_CONN_LOST
                finally:
                    err = None
                    del err

            else:
                # Zero bytes read means the peer closed the connection.
                if len(command) == 0:
                    return MQTT_ERR_CONN_LOST
                command, = struct.unpack("!B", command)
                self._in_packet["command"] = command
        # Phase 2: decode the variable-length "remaining length" field
        # (7 data bits per byte; the high bit flags a continuation byte).
        if self._in_packet["have_remaining"] == 0:
            while 1:
                try:
                    byte = self._sock_recv(1)
                except BlockingIOError:
                    return MQTT_ERR_AGAIN
                except ConnectionError as err:
                    try:
                        self._easy_log(MQTT_LOG_ERR, "failed to receive on socket: %s", err)
                        return MQTT_ERR_CONN_LOST
                    finally:
                        err = None
                        del err

                else:
                    if len(byte) == 0:
                        return MQTT_ERR_CONN_LOST
                    byte, = struct.unpack("!B", byte)
                    self._in_packet["remaining_count"].append(byte)
                    # MQTT allows at most 4 length bytes.
                    if len(self._in_packet["remaining_count"]) > 4:
                        return MQTT_ERR_PROTOCOL
                    self._in_packet["remaining_length"] += (byte & 127) * self._in_packet["remaining_mult"]
                    self._in_packet["remaining_mult"] = self._in_packet["remaining_mult"] * 128
                # High bit clear marks the final length byte.
                if byte & 128 == 0:
                    break

            self._in_packet["have_remaining"] = 1
            self._in_packet["to_process"] = self._in_packet["remaining_length"]
        # Phase 3: read the payload, capping recv calls per invocation so a
        # single large packet cannot monopolise the network loop.
        count = 100
        while self._in_packet["to_process"] > 0:
            try:
                data = self._sock_recv(self._in_packet["to_process"])
            except BlockingIOError:
                return MQTT_ERR_AGAIN
            except ConnectionError as err:
                try:
                    self._easy_log(MQTT_LOG_ERR, "failed to receive on socket: %s", err)
                    return MQTT_ERR_CONN_LOST
                finally:
                    err = None
                    del err

            else:
                if len(data) == 0:
                    return MQTT_ERR_CONN_LOST
                self._in_packet["to_process"] -= len(data)
                self._in_packet["packet"] += data
            count -= 1
            if count == 0:
                with self._msgtime_mutex:
                    self._last_msg_in = time_func()
                return MQTT_ERR_AGAIN

        # Complete packet: hand off to the handler, then reset parser state.
        self._in_packet["pos"] = 0
        rc = self._packet_handle()
        self._in_packet = {'command':0, 
         'have_remaining':0, 
         'remaining_count':[],  'remaining_mult':1, 
         'remaining_length':0, 
         'packet':bytearray(b''), 
         'to_process':0, 
         'pos':0}
        with self._msgtime_mutex:
            self._last_msg_in = time_func()
        return rc
+
+    def _packet_write(self):
+        while True:
+            try:
+                packet = self._out_packet.popleft()
+            except IndexError:
+                return MQTT_ERR_SUCCESS
+            else:
+                try:
+                    write_length = self._sock_send(packet["packet"][packet["pos"][:None]])
+                except (AttributeError, ValueError):
+                    self._out_packet.appendleft(packet)
+                    return MQTT_ERR_SUCCESS
+                except BlockingIOError:
+                    self._out_packet.appendleft(packet)
+                    return MQTT_ERR_AGAIN
+                except ConnectionError as err:
+                    try:
+                        self._out_packet.appendleft(packet)
+                        self._easy_log(MQTT_LOG_ERR, "failed to receive on socket: %s", err)
+                        return MQTT_ERR_CONN_LOST
+                    finally:
+                        err = None
+                        del err
+
+                if write_length > 0:
+                    packet["to_process"] -= write_length
+                    packet["pos"] += write_length
+                    if packet["to_process"] == 0:
+                        if packet["command"] & 240 == PUBLISH:
+                            if packet["qos"] == 0:
+                                with self._callback_mutex:
+                                    on_publish = self.on_publish
+                                if on_publish:
+                                    with self._in_callback_mutex:
+                                        try:
+                                            on_publish(self, self._userdata, packet["mid"])
+                                        except Exception as err:
+                                            try:
+                                                self._easy_log(MQTT_LOG_ERR, "Caught exception in on_publish: %s", err)
+                                                if not self.suppress_exceptions:
+                                                    raise
+                                            finally:
+                                                err = None
+                                                del err
+
+                                packet["info"]._set_as_published()
+                        if packet["command"] & 240 == DISCONNECT:
+                            with self._msgtime_mutex:
+                                self._last_msg_out = time_func()
+                            self._do_on_disconnect(MQTT_ERR_SUCCESS)
+                            self._sock_close()
+                            return MQTT_ERR_SUCCESS
+                    else:
+                        self._out_packet.appendleft(packet)
+                else:
+                    break
+
+        with self._msgtime_mutex:
+            self._last_msg_out = time_func()
+        return MQTT_ERR_SUCCESS
+
+    def _easy_log(self, level, fmt, *args):
+        if self.on_log is not None:
+            buf = fmt % args
+            try:
+                self.on_log(self, self._userdata, level, buf)
+            except Exception:
+                pass
+
+        if self._logger is not None:
+            level_std = LOGGING_LEVEL[level]
+            (self._logger.log)(level_std, fmt, *args)
+
+    def _check_keepalive(self):
+        if self._keepalive == 0:
+            return MQTT_ERR_SUCCESS
+            now = time_func()
+            with self._msgtime_mutex:
+                last_msg_out = self._last_msg_out
+                last_msg_in = self._last_msg_in
+            if not self._sock is not None or now - last_msg_out >= self._keepalive or now - last_msg_in >= self._keepalive:
+                if self._state == mqtt_cs_connected and self._ping_t == 0:
+                    try:
+                        self._send_pingreq()
+                    except Exception:
+                        self._sock_close()
+                        self._do_on_disconnect(MQTT_ERR_CONN_LOST)
+                    else:
+                        with self._msgtime_mutex:
+                            self._last_msg_out = now
+                            self._last_msg_in = now
+        else:
+            self._sock_close()
+            if self._state == mqtt_cs_disconnecting:
+                rc = MQTT_ERR_SUCCESS
+            else:
+                rc = MQTT_ERR_KEEPALIVE
+            self._do_on_disconnect(rc)
+
+    def _mid_generate(self):
+        with self._mid_generate_mutex:
+            self._last_mid += 1
+            if self._last_mid == 65536:
+                self._last_mid = 1
+            return self._last_mid
+
+    @staticmethod
+    def _topic_wildcard_len_check(topic):
+        if b'+' in topic or b'#' in topic or len(topic) > 65535:
+            return MQTT_ERR_INVAL
+        return MQTT_ERR_SUCCESS
+
+    @staticmethod
+    def _filter_wildcard_len_checkParse error at or near `LOAD_GLOBAL' instruction at offset 60
+
+    def _send_pingreq(self):
+        self._easy_log(MQTT_LOG_DEBUG, "Sending PINGREQ")
+        rc = self._send_simple_command(PINGREQ)
+        if rc == MQTT_ERR_SUCCESS:
+            self._ping_t = time_func()
+        return rc
+
+    def _send_pingresp(self):
+        self._easy_log(MQTT_LOG_DEBUG, "Sending PINGRESP")
+        return self._send_simple_command(PINGRESP)
+
+    def _send_puback(self, mid):
+        self._easy_log(MQTT_LOG_DEBUG, "Sending PUBACK (Mid: %d)", mid)
+        return self._send_command_with_mid(PUBACK, mid, False)
+
+    def _send_pubcomp(self, mid):
+        self._easy_log(MQTT_LOG_DEBUG, "Sending PUBCOMP (Mid: %d)", mid)
+        return self._send_command_with_mid(PUBCOMP, mid, False)
+
+    def _pack_remaining_length(self, packet, remaining_length):
+        remaining_bytes = []
+        while 1:
+            byte = remaining_length % 128
+            remaining_length = remaining_length // 128
+            if remaining_length > 0:
+                byte |= 128
+            remaining_bytes.append(byte)
+            packet.append(byte)
+            if remaining_length == 0:
+                return packet
+
+    def _pack_str16(self, packet, data):
+        if isinstance(data, unicode):
+            data = data.encode("utf-8")
+        packet.extend(struct.pack("!H", len(data)))
+        packet.extend(data)
+
+    def _send_publishParse error at or near `COME_FROM' instruction at offset 32_0
+
+    def _send_pubrec(self, mid):
+        self._easy_log(MQTT_LOG_DEBUG, "Sending PUBREC (Mid: %d)", mid)
+        return self._send_command_with_mid(PUBREC, mid, False)
+
+    def _send_pubrel(self, mid):
+        self._easy_log(MQTT_LOG_DEBUG, "Sending PUBREL (Mid: %d)", mid)
+        return self._send_command_with_mid(PUBREL | 2, mid, False)
+
+    def _send_command_with_mid(self, command, mid, dup):
+        if dup:
+            command |= 8
+        remaining_length = 2
+        packet = struct.pack("!BBH", command, remaining_length, mid)
+        return self._packet_queue(command, packet, mid, 1)
+
+    def _send_simple_command(self, command):
+        remaining_length = 0
+        packet = struct.pack("!BB", command, remaining_length)
+        return self._packet_queue(command, packet, 0, 0)
+
+    def _send_connect(self, keepalive):
+        proto_ver = self._protocol
+        protocol = b'MQTT' if proto_ver >= MQTTv311 else b'MQIsdp'
+        remaining_length = 2 + len(protocol) + 1 + 1 + 2 + 2 + len(self._client_id)
+        connect_flags = 0
+        if self._protocol == MQTTv5:
+            if self._clean_start == True:
+                connect_flags |= 2
+        elif self._clean_start == MQTT_CLEAN_START_FIRST_ONLY:
+            if self._mqttv5_first_connect:
+                connect_flags |= 2
+            else:
+                if self._clean_session:
+                    connect_flags |= 2
+                elif self._will:
+                    remaining_length += 2 + len(self._will_topic) + 2 + len(self._will_payload)
+                    connect_flags |= 4 | (self._will_qos & 3) << 3 | (self._will_retain & 1) << 5
+                if self._username is not None:
+                    remaining_length += 2 + len(self._username)
+                    connect_flags |= 128
+                    if self._password is not None:
+                        connect_flags |= 64
+                        remaining_length += 2 + len(self._password)
+            if self._protocol == MQTTv5:
+                if self._connect_properties is None:
+                    packed_connect_properties = b'\x00'
+        else:
+            packed_connect_properties = self._connect_properties.pack()
+        remaining_length += len(packed_connect_properties)
+        if self._will:
+            if self._will_properties is None:
+                packed_will_properties = b'\x00'
+            else:
+                packed_will_properties = self._will_properties.pack()
+            remaining_length += len(packed_will_properties)
+        command = CONNECT
+        packet = bytearray()
+        packet.append(command)
+        if self._client_mode == MQTT_BRIDGE:
+            proto_ver |= 128
+        self._pack_remaining_length(packet, remaining_length)
+        packet.extend(struct.pack("!H" + str(len(protocol)) + "sBBH", len(protocol), protocol, proto_ver, connect_flags, keepalive))
+        if self._protocol == MQTTv5:
+            packet += packed_connect_properties
+        self._pack_str16(packet, self._client_id)
+        if self._will:
+            if self._protocol == MQTTv5:
+                packet += packed_will_properties
+            self._pack_str16(packet, self._will_topic)
+            self._pack_str16(packet, self._will_payload)
+        elif self._username is not None:
+            self._pack_str16(packet, self._username)
+            if self._password is not None:
+                self._pack_str16(packet, self._password)
+            self._keepalive = keepalive
+            if self._protocol == MQTTv5:
+                self._easy_log(MQTT_LOG_DEBUG, "Sending CONNECT (u%d, p%d, wr%d, wq%d, wf%d, c%d, k%d) client_id=%s properties=%s", (connect_flags & 128) >> 7, (connect_flags & 64) >> 6, (connect_flags & 32) >> 5, (connect_flags & 24) >> 3, (connect_flags & 4) >> 2, (connect_flags & 2) >> 1, keepalive, self._client_id, self._connect_properties)
+        else:
+            self._easy_log(MQTT_LOG_DEBUG, "Sending CONNECT (u%d, p%d, wr%d, wq%d, wf%d, c%d, k%d) client_id=%s", (connect_flags & 128) >> 7, (connect_flags & 64) >> 6, (connect_flags & 32) >> 5, (connect_flags & 24) >> 3, (connect_flags & 4) >> 2, (connect_flags & 2) >> 1, keepalive, self._client_id)
+        return self._packet_queue(command, packet, 0, 0)
+
+    def _send_disconnect(self, reasoncode=None, properties=None):
+        if self._protocol == MQTTv5:
+            self._easy_log(MQTT_LOG_DEBUG, "Sending DISCONNECT reasonCode=%s properties=%s", reasoncode, properties)
+        else:
+            self._easy_log(MQTT_LOG_DEBUG, "Sending DISCONNECT")
+        remaining_length = 0
+        command = DISCONNECT
+        packet = bytearray()
+        packet.append(command)
+        if self._protocol == MQTTv5:
+            if properties is not None or reasoncode is not None:
+                if reasoncode is None:
+                    reasoncode = ReasonCodes((DISCONNECT >> 4), identifier=0)
+                remaining_length += 1
+                if properties is not None:
+                    packed_props = properties.pack()
+                    remaining_length += len(packed_props)
+        self._pack_remaining_length(packet, remaining_length)
+        if self._protocol == MQTTv5:
+            if reasoncode != None:
+                packet += reasoncode.pack()
+                if properties != None:
+                    packet += packed_props
+        return self._packet_queue(command, packet, 0, 0)
+
+    def _send_subscribe(self, dup, topics, properties=None):
+        remaining_length = 2
+        if self._protocol == MQTTv5:
+            if properties is None:
+                packed_subscribe_properties = b'\x00'
+            else:
+                packed_subscribe_properties = properties.pack()
+            remaining_length += len(packed_subscribe_properties)
+        for t, _ in topics:
+            remaining_length += 2 + len(t) + 1
+
+        command = SUBSCRIBE | dup << 3 | 2
+        packet = bytearray()
+        packet.append(command)
+        self._pack_remaining_length(packet, remaining_length)
+        local_mid = self._mid_generate()
+        packet.extend(struct.pack("!H", local_mid))
+        if self._protocol == MQTTv5:
+            packet += packed_subscribe_properties
+        for t, q in topics:
+            self._pack_str16(packet, t)
+            if self._protocol == MQTTv5:
+                packet += q.pack()
+            else:
+                packet.append(q)
+
+        self._easy_log(MQTT_LOG_DEBUG, "Sending SUBSCRIBE (d%d, m%d) %s", dup, local_mid, topics)
+        return (
+         self._packet_queue(command, packet, local_mid, 1), local_mid)
+
+    def _send_unsubscribe(self, dup, topics, properties=None):
+        remaining_length = 2
+        if self._protocol == MQTTv5:
+            if properties is None:
+                packed_unsubscribe_properties = b'\x00'
+            else:
+                packed_unsubscribe_properties = properties.pack()
+            remaining_length += len(packed_unsubscribe_properties)
+        else:
+            for t in topics:
+                remaining_length += 2 + len(t)
+
+            command = UNSUBSCRIBE | dup << 3 | 2
+            packet = bytearray()
+            packet.append(command)
+            self._pack_remaining_length(packet, remaining_length)
+            local_mid = self._mid_generate()
+            packet.extend(struct.pack("!H", local_mid))
+            if self._protocol == MQTTv5:
+                packet += packed_unsubscribe_properties
+            for t in topics:
+                self._pack_str16(packet, t)
+
+            if self._protocol == MQTTv5:
+                self._easy_log(MQTT_LOG_DEBUG, "Sending UNSUBSCRIBE (d%d, m%d) %s %s", dup, local_mid, properties, topics)
+            else:
+                self._easy_log(MQTT_LOG_DEBUG, "Sending UNSUBSCRIBE (d%d, m%d) %s", dup, local_mid, topics)
+        return (
+         self._packet_queue(command, packet, local_mid, 1), local_mid)
+
+    def _check_clean_session(self):
+        if self._protocol == MQTTv5:
+            if self._clean_start == MQTT_CLEAN_START_FIRST_ONLY:
+                return self._mqttv5_first_connect
+            return self._clean_start
+        else:
+            return self._clean_session
+
+    def _messages_reconnect_reset_outParse error at or near `COME_FROM' instruction at offset 200_0
+
+    def _messages_reconnect_reset_in(self):
+        with self._in_message_mutex:
+            if self._check_clean_session():
+                self._in_messages = collections.OrderedDict()
+                return
+            for m in self._in_messages.values():
+                m.timestamp = 0
+                if m.qos != 2:
+                    self._in_messages.pop(m.mid)
+                    continue
+
    def _messages_reconnect_reset(self):
        """Reset outgoing then incoming message state before a reconnect."""
        self._messages_reconnect_reset_out()
        self._messages_reconnect_reset_in()
+
+    def _packet_queue(self, command, packet, mid, qos, info=None):
+        mpkt = {'command':command, 
+         'mid':mid, 
+         'qos':qos, 
+         'pos':0, 
+         'to_process':len(packet), 
+         'packet':packet, 
+         'info':info}
+        self._out_packet.append(mpkt)
+        if self._sockpairW is not None:
+            try:
+                self._sockpairW.send(sockpair_data)
+            except BlockingIOError:
+                pass
+
+        if self._thread is None:
+            if self._on_socket_register_write is None:
+                if self._in_callback_mutex.acquire(False):
+                    self._in_callback_mutex.release()
+                    return self.loop_write()
+        self._call_socket_register_write()
+        return MQTT_ERR_SUCCESS
+
+    def _packet_handle(self):
+        cmd = self._in_packet["command"] & 240
+        if cmd == PINGREQ:
+            return self._handle_pingreq()
+        if cmd == PINGRESP:
+            return self._handle_pingresp()
+        if cmd == PUBACK:
+            return self._handle_pubackcomp("PUBACK")
+        if cmd == PUBCOMP:
+            return self._handle_pubackcomp("PUBCOMP")
+        if cmd == PUBLISH:
+            return self._handle_publish()
+        if cmd == PUBREC:
+            return self._handle_pubrec()
+        if cmd == PUBREL:
+            return self._handle_pubrel()
+        if cmd == CONNACK:
+            return self._handle_connack()
+        if cmd == SUBACK:
+            return self._handle_suback()
+        if cmd == UNSUBACK:
+            return self._handle_unsuback()
+        if cmd == DISCONNECT:
+            if self._protocol == MQTTv5:
+                return self._handle_disconnect()
+        self._easy_log(MQTT_LOG_ERR, "Error: Unrecognised command %s", cmd)
+        return MQTT_ERR_PROTOCOL
+
+    def _handle_pingreq(self):
+        if self._in_packet["remaining_length"] != 0:
+            return MQTT_ERR_PROTOCOL
+        self._easy_log(MQTT_LOG_DEBUG, "Received PINGREQ")
+        return self._send_pingresp()
+
+    def _handle_pingresp(self):
+        if self._in_packet["remaining_length"] != 0:
+            return MQTT_ERR_PROTOCOL
+        self._ping_t = 0
+        self._easy_log(MQTT_LOG_DEBUG, "Received PINGRESP")
+        return MQTT_ERR_SUCCESS
+
+    def _handle_connack(self):
+        if self._protocol == MQTTv5:
+            if self._in_packet["remaining_length"] < 2:
+                return MQTT_ERR_PROTOCOL
+            else:
+                if self._in_packet["remaining_length"] != 2:
+                    return MQTT_ERR_PROTOCOL
+        else:
+            if self._protocol == MQTTv5:
+                flags, result = struct.unpack("!BB", self._in_packet["packet"][None[:2]])
+                if result == 1:
+                    reason = 132
+                    properties = None
+                else:
+                    reason = ReasonCodes((CONNACK >> 4), identifier=result)
+                    properties = Properties(CONNACK >> 4)
+                    properties.unpack(self._in_packet["packet"][2[:None]])
+            else:
+                flags, result = struct.unpack("!BB", self._in_packet["packet"])
+            if self._protocol == MQTTv311:
+                if result == CONNACK_REFUSED_PROTOCOL_VERSION:
+                    if not self._reconnect_on_failure:
+                        return MQTT_ERR_PROTOCOL
+                    self._easy_log(MQTT_LOG_DEBUG, "Received CONNACK (%s, %s), attempting downgrade to MQTT v3.1.", flags, result)
+                    self._protocol = MQTTv31
+                    return self.reconnect()
+                if result == CONNACK_REFUSED_IDENTIFIER_REJECTED:
+                    if self._client_id == b'':
+                        if not self._reconnect_on_failure:
+                            return MQTT_ERR_PROTOCOL
+                        self._easy_log(MQTT_LOG_DEBUG, "Received CONNACK (%s, %s), attempting to use non-empty CID", flags, result)
+                        self._client_id = base62((uuid.uuid4().int), padding=22)
+                        return self.reconnect()
+            if result == 0:
+                self._state = mqtt_cs_connected
+                self._reconnect_delay = None
+            elif self._protocol == MQTTv5:
+                self._easy_log(MQTT_LOG_DEBUG, "Received CONNACK (%s, %s) properties=%s", flags, reason, properties)
+            else:
+                self._easy_log(MQTT_LOG_DEBUG, "Received CONNACK (%s, %s)", flags, result)
+            self._mqttv5_first_connect = False
+            with self._callback_mutex:
+                on_connect = self.on_connect
+            if on_connect:
+                flags_dict = {}
+                flags_dict["session present"] = flags & 1
+                with self._in_callback_mutex:
+                    try:
+                        if self._protocol == MQTTv5:
+                            on_connect(self, self._userdata, flags_dict, reason, properties)
+                        else:
+                            on_connect(self, self._userdata, flags_dict, result)
+                    except Exception as err:
+                        try:
+                            self._easy_log(MQTT_LOG_ERR, "Caught exception in on_connect: %s", err)
+                            if not self.suppress_exceptions:
+                                raise
+                        finally:
+                            err = None
+                            del err
+
+            if result == 0:
+                rc = 0
+                with self._out_message_mutex:
+                    for m in self._out_messages.values():
+                        m.timestamp = time_func()
+                        if m.state == mqtt_ms_queued:
+                            self.loop_write()
+                            return MQTT_ERR_SUCCESS
+                            if m.qos == 0:
+                                with self._in_callback_mutex:
+                                    rc = self._send_publish((m.mid),
+                                      (m.topic.encode("utf-8")),
+                                      (m.payload),
+                                      (m.qos),
+                                      (m.retain),
+                                      (m.dup),
+                                      properties=(m.properties))
+                                if rc != 0:
+                                    return rc
+                        elif m.qos == 1:
+                            if m.state == mqtt_ms_publish:
+                                self._inflight_messages += 1
+                                m.state = mqtt_ms_wait_for_puback
+                                with self._in_callback_mutex:
+                                    rc = self._send_publish((m.mid),
+                                      (m.topic.encode("utf-8")),
+                                      (m.payload),
+                                      (m.qos),
+                                      (m.retain),
+                                      (m.dup),
+                                      properties=(m.properties))
+                                if rc != 0:
+                                    return rc
+                        elif m.qos == 2:
+                            if m.state == mqtt_ms_publish:
+                                self._inflight_messages += 1
+                                m.state = mqtt_ms_wait_for_pubrec
+                                with self._in_callback_mutex:
+                                    rc = self._send_publish((m.mid),
+                                      (m.topic.encode("utf-8")),
+                                      (m.payload),
+                                      (m.qos),
+                                      (m.retain),
+                                      (m.dup),
+                                      properties=(m.properties))
+                                if rc != 0:
+                                    return rc
+                            elif m.state == mqtt_ms_resend_pubrel:
+                                self._inflight_messages += 1
+                                m.state = mqtt_ms_wait_for_pubcomp
+                                with self._in_callback_mutex:
+                                    rc = self._send_pubrel(m.mid)
+                                if rc != 0:
+                                    return rc
+                        self.loop_write()
+
+                return rc
+            if result > 0 and result < 6:
+                return MQTT_ERR_CONN_REFUSED
+        return MQTT_ERR_PROTOCOL
+
+    def _handle_disconnect(self):
+        packet_type = DISCONNECT >> 4
+        reasonCode = properties = None
+        if self._in_packet["remaining_length"] > 2:
+            reasonCode = ReasonCodes(packet_type)
+            reasonCode.unpack(self._in_packet["packet"])
+            if self._in_packet["remaining_length"] > 3:
+                properties = Properties(packet_type)
+                props, props_len = properties.unpack(self._in_packet["packet"][1[:None]])
+        self._easy_log(MQTT_LOG_DEBUG, "Received DISCONNECT %s %s", reasonCode, properties)
+        self._loop_rc_handle(reasonCode, properties)
+        return MQTT_ERR_SUCCESS
+
+    def _handle_suback(self):
+        self._easy_log(MQTT_LOG_DEBUG, "Received SUBACK")
+        pack_format = "!H" + str(len(self._in_packet["packet"]) - 2) + "s"
+        mid, packet = struct.unpack(pack_format, self._in_packet["packet"])
+        if self._protocol == MQTTv5:
+            properties = Properties(SUBACK >> 4)
+            props, props_len = properties.unpack(packet)
+            reasoncodes = []
+            for c in packet[props_len[:None]]:
+                if sys.version_info[0] < 3:
+                    c = ord(c)
+                reasoncodes.append(ReasonCodes((SUBACK >> 4), identifier=c))
+
+        else:
+            pack_format = "!" + "B" * len(packet)
+            granted_qos = struct.unpack(pack_format, packet)
+        with self._callback_mutex:
+            on_subscribe = self.on_subscribe
+        if on_subscribe:
+            with self._in_callback_mutex:
+                try:
+                    if self._protocol == MQTTv5:
+                        on_subscribe(self, self._userdata, mid, reasoncodes, properties)
+                    else:
+                        on_subscribe(self, self._userdata, mid, granted_qos)
+                except Exception as err:
+                    try:
+                        self._easy_log(MQTT_LOG_ERR, "Caught exception in on_subscribe: %s", err)
+                        if not self.suppress_exceptions:
+                            raise
+                    finally:
+                        err = None
+                        del err
+
+        return MQTT_ERR_SUCCESS
+
+    def _handle_publish(self):
+        rc = 0
+        header = self._in_packet["command"]
+        message = MQTTMessage()
+        message.dup = (header & 8) >> 3
+        message.qos = (header & 6) >> 1
+        message.retain = header & 1
+        pack_format = "!H" + str(len(self._in_packet["packet"]) - 2) + "s"
+        slen, packet = struct.unpack(pack_format, self._in_packet["packet"])
+        pack_format = "!" + str(slen) + "s" + str(len(packet) - slen) + "s"
+        topic, packet = struct.unpack(pack_format, packet)
+        if self._protocol != MQTTv5:
+            if len(topic) == 0:
+                return MQTT_ERR_PROTOCOL
+        try:
+            print_topic = topic.decode("utf-8")
+        except UnicodeDecodeError:
+            print_topic = "TOPIC WITH INVALID UTF-8: " + str(topic)
+
+        message.topic = topic
+        if message.qos > 0:
+            pack_format = "!H" + str(len(packet) - 2) + "s"
+            message.mid, packet = struct.unpack(pack_format, packet)
+        elif self._protocol == MQTTv5:
+            message.properties = Properties(PUBLISH >> 4)
+            props, props_len = message.properties.unpack(packet)
+            packet = packet[props_len[:None]]
+        message.payload = packet
+        if self._protocol == MQTTv5:
+            self._easy_logMQTT_LOG_DEBUG"Received PUBLISH (d%d, q%d, r%d, m%d), '%s', properties=%s, ...  (%d bytes)"message.dupmessage.qosmessage.retainmessage.midprint_topicmessage.propertieslen(message.payload)
+        else:
+            self._easy_log(MQTT_LOG_DEBUG, "Received PUBLISH (d%d, q%d, r%d, m%d), '%s', ...  (%d bytes)", message.dup, message.qos, message.retain, message.mid, print_topic, len(message.payload))
+        message.timestamp = time_func()
+        if message.qos == 0:
+            self._handle_on_message(message)
+            return MQTT_ERR_SUCCESS
+        if message.qos == 1:
+            self._handle_on_message(message)
+            return self._send_puback(message.mid)
+        if message.qos == 2:
+            rc = self._send_pubrec(message.mid)
+            message.state = mqtt_ms_wait_for_pubrel
+            with self._in_message_mutex:
+                self._in_messages[message.mid] = message
+            return rc
+        return MQTT_ERR_PROTOCOL
+
+    def _handle_pubrel(self):
+        if self._protocol == MQTTv5:
+            if self._in_packet["remaining_length"] < 2:
+                return MQTT_ERR_PROTOCOL
+        elif self._in_packet["remaining_length"] != 2:
+            return MQTT_ERR_PROTOCOL
+        mid, = struct.unpack("!H", self._in_packet["packet"])
+        self._easy_log(MQTT_LOG_DEBUG, "Received PUBREL (Mid: %d)", mid)
+        with self._in_message_mutex:
+            if mid in self._in_messages:
+                message = self._in_messages.pop(mid)
+                self._handle_on_message(message)
+                self._inflight_messages -= 1
+                if self._max_inflight_messages > 0:
+                    with self._out_message_mutex:
+                        rc = self._update_inflight()
+                    if rc != MQTT_ERR_SUCCESS:
+                        return rc
+        return self._send_pubcomp(mid)
+
+    def _update_inflight(self):
+        for m in self._out_messages.values():
+            if self._inflight_messages < self._max_inflight_messages:
+                if m.qos > 0 and m.state == mqtt_ms_queued:
+                    self._inflight_messages += 1
+                    if m.qos == 1:
+                        m.state = mqtt_ms_wait_for_puback
+                    else:
+                        if m.qos == 2:
+                            m.state = mqtt_ms_wait_for_pubrec
+                    rc = self._send_publish((m.mid),
+                      (m.topic.encode("utf-8")),
+                      (m.payload),
+                      (m.qos),
+                      (m.retain),
+                      (m.dup),
+                      properties=(m.properties))
+                    if rc != 0:
+                        return rc
+            else:
+                return MQTT_ERR_SUCCESS
+
+        return MQTT_ERR_SUCCESS
+
+    def _handle_pubrec(self):
+        if self._protocol == MQTTv5:
+            if self._in_packet["remaining_length"] < 2:
+                return MQTT_ERR_PROTOCOL
+        elif self._in_packet["remaining_length"] != 2:
+            return MQTT_ERR_PROTOCOL
+        mid, = struct.unpack("!H", self._in_packet["packet"][None[:2]])
+        if self._protocol == MQTTv5:
+            if self._in_packet["remaining_length"] > 2:
+                reasonCode = ReasonCodes(PUBREC >> 4)
+                reasonCode.unpack(self._in_packet["packet"][2[:None]])
+                if self._in_packet["remaining_length"] > 3:
+                    properties = Properties(PUBREC >> 4)
+                    props, props_len = properties.unpack(self._in_packet["packet"][3[:None]])
+        self._easy_log(MQTT_LOG_DEBUG, "Received PUBREC (Mid: %d)", mid)
+        with self._out_message_mutex:
+            if mid in self._out_messages:
+                msg = self._out_messages[mid]
+                msg.state = mqtt_ms_wait_for_pubcomp
+                msg.timestamp = time_func()
+                return self._send_pubrel(mid)
+        return MQTT_ERR_SUCCESS
+
+    def _handle_unsuback(self):
+        if self._protocol == MQTTv5:
+            if self._in_packet["remaining_length"] < 4:
+                return MQTT_ERR_PROTOCOL
+        elif self._in_packet["remaining_length"] != 2:
+            return MQTT_ERR_PROTOCOL
+        mid, = struct.unpack("!H", self._in_packet["packet"][None[:2]])
+        if self._protocol == MQTTv5:
+            packet = self._in_packet["packet"][2[:None]]
+            properties = Properties(UNSUBACK >> 4)
+            props, props_len = properties.unpack(packet)
+            reasoncodes = []
+            for c in packet[props_len[:None]]:
+                if sys.version_info[0] < 3:
+                    c = ord(c)
+                reasoncodes.append(ReasonCodes((UNSUBACK >> 4), identifier=c))
+
+            if len(reasoncodes) == 1:
+                reasoncodes = reasoncodes[0]
+        self._easy_log(MQTT_LOG_DEBUG, "Received UNSUBACK (Mid: %d)", mid)
+        with self._callback_mutex:
+            on_unsubscribe = self.on_unsubscribe
+        if on_unsubscribe:
+            with self._in_callback_mutex:
+                try:
+                    if self._protocol == MQTTv5:
+                        on_unsubscribe(self, self._userdata, mid, properties, reasoncodes)
+                    else:
+                        on_unsubscribe(self, self._userdata, mid)
+                except Exception as err:
+                    try:
+                        self._easy_log(MQTT_LOG_ERR, "Caught exception in on_unsubscribe: %s", err)
+                        if not self.suppress_exceptions:
+                            raise
+                    finally:
+                        err = None
+                        del err
+
+        return MQTT_ERR_SUCCESS
+
+    def _do_on_disconnect(self, rc, properties=None):
+        with self._callback_mutex:
+            on_disconnect = self.on_disconnect
+        if on_disconnect:
+            with self._in_callback_mutex:
+                try:
+                    if self._protocol == MQTTv5:
+                        on_disconnect(self, self._userdata, rc, properties)
+                    else:
+                        on_disconnect(self, self._userdata, rc)
+                except Exception as err:
+                    try:
+                        self._easy_log(MQTT_LOG_ERR, "Caught exception in on_disconnect: %s", err)
+                        if not self.suppress_exceptions:
+                            raise
+                    finally:
+                        err = None
+                        del err
+
+    def _do_on_publish(self, mid):
+        with self._callback_mutex:
+            on_publish = self.on_publish
+        if on_publish:
+            with self._in_callback_mutex:
+                try:
+                    on_publish(self, self._userdata, mid)
+                except Exception as err:
+                    try:
+                        self._easy_log(MQTT_LOG_ERR, "Caught exception in on_publish: %s", err)
+                        if not self.suppress_exceptions:
+                            raise
+                    finally:
+                        err = None
+                        del err
+
+        msg = self._out_messages.pop(mid)
+        msg.info._set_as_published()
+        if msg.qos > 0:
+            self._inflight_messages -= 1
+            if self._max_inflight_messages > 0:
+                rc = self._update_inflight()
+                if rc != MQTT_ERR_SUCCESS:
+                    return rc
+        return MQTT_ERR_SUCCESS
+
+    def _handle_pubackcomp(self, cmd):
+        if self._protocol == MQTTv5:
+            if self._in_packet["remaining_length"] < 2:
+                return MQTT_ERR_PROTOCOL
+        elif self._in_packet["remaining_length"] != 2:
+            return MQTT_ERR_PROTOCOL
+        packet_type = PUBACK if cmd == "PUBACK" else PUBCOMP
+        packet_type = packet_type >> 4
+        mid, = struct.unpack("!H", self._in_packet["packet"][None[:2]])
+        if self._protocol == MQTTv5:
+            if self._in_packet["remaining_length"] > 2:
+                reasonCode = ReasonCodes(packet_type)
+                reasonCode.unpack(self._in_packet["packet"][2[:None]])
+                if self._in_packet["remaining_length"] > 3:
+                    properties = Properties(packet_type)
+                    props, props_len = properties.unpack(self._in_packet["packet"][3[:None]])
+        self._easy_log(MQTT_LOG_DEBUG, "Received %s (Mid: %d)", cmd, mid)
+        with self._out_message_mutex:
+            if mid in self._out_messages:
+                rc = self._do_on_publish(mid)
+                return rc
+        return MQTT_ERR_SUCCESS
+
+    def _handle_on_message(self, message):
+        matched = False
+        try:
+            topic = message.topic
+        except UnicodeDecodeError:
+            topic = None
+
+        on_message_callbacks = []
+        with self._callback_mutex:
+            if topic is not None:
+                for callback in self._on_message_filtered.iter_match(message.topic):
+                    on_message_callbacks.append(callback)
+
+            elif len(on_message_callbacks) == 0:
+                on_message = self.on_message
+            else:
+                on_message = None
+        for callback in on_message_callbacks:
+            with self._in_callback_mutex:
+                try:
+                    callback(self, self._userdata, message)
+                except Exception as err:
+                    try:
+                        self._easy_log(MQTT_LOG_ERR, "Caught exception in user defined callback function %s: %s", callback.__name__, err)
+                        if not self.suppress_exceptions:
+                            raise
+                    finally:
+                        err = None
+                        del err
+
+        if on_message:
+            with self._in_callback_mutex:
+                try:
+                    on_message(self, self._userdata, message)
+                except Exception as err:
+                    try:
+                        self._easy_log(MQTT_LOG_ERR, "Caught exception in on_message: %s", err)
+                        if not self.suppress_exceptions:
+                            raise
+                    finally:
+                        err = None
+                        del err
+
+    def _handle_on_connect_fail(self):
+        with self._callback_mutex:
+            on_connect_fail = self.on_connect_fail
+        if on_connect_fail:
+            with self._in_callback_mutex:
+                try:
+                    on_connect_fail(self, self._userdata)
+                except Exception as err:
+                    try:
+                        self._easy_log(MQTT_LOG_ERR, "Caught exception in on_connect_fail: %s", err)
+                    finally:
+                        err = None
+                        del err
+
    def _thread_main(self):
        # Entry point of the background network thread: run the network loop
        # until disconnect, retrying the first connection rather than
        # aborting if it fails.
        self.loop_forever(retry_first_connection=True)
+
+    def _reconnect_wait(self):
+        now = time_func()
+        with self._reconnect_delay_mutex:
+            if self._reconnect_delay is None:
+                self._reconnect_delay = self._reconnect_min_delay
+            else:
+                self._reconnect_delay = min(self._reconnect_delay * 2, self._reconnect_max_delay)
+            target_time = now + self._reconnect_delay
+        remaining = target_time - now
+        while self._state != mqtt_cs_disconnecting and not self._thread_terminate:
+            if remaining > 0:
+                time.sleep(min(remaining, 1))
+                remaining = target_time - time_func()
+
+    @staticmethod
+    def _proxy_is_valid(p):
+
+        def check(t, a):
+            return socks is not None and t in set([socks.HTTP, socks.SOCKS4, socks.SOCKS5]) and a
+
+        if isinstance(p, dict):
+            return check(p.get("proxy_type"), p.get("proxy_addr"))
+        if isinstance(p, (list, tuple)):
+            return len(p) == 6 and check(p[0], p[1])
+        return False
+
+    def _get_proxy(self):
+        if socks is None:
+            return
+            if self._proxy_is_valid(self._proxy):
+                return self._proxy
+        else:
+            if not (hasattr(urllib_dot_request, "proxy_bypass") and urllib_dot_request.proxy_bypass(self._host)):
+                env_proxies = urllib_dot_request.getproxies()
+                if "mqtt" in env_proxies:
+                    parts = urllib_dot_parse.urlparse(env_proxies["mqtt"])
+                    if parts.scheme == "http":
+                        proxy = {'proxy_type':socks.HTTP,  'proxy_addr':parts.hostname, 
+                         'proxy_port':parts.port}
+                        return proxy
+                    if parts.scheme == "socks":
+                        proxy = {'proxy_type':socks.SOCKS5,  'proxy_addr':parts.hostname, 
+                         'proxy_port':parts.port}
+                        return proxy
+        socks_default = socks.get_default_proxy()
+        if self._proxy_is_valid(socks_default):
+            proxy_keys = ('proxy_type', 'proxy_addr', 'proxy_port', 'proxy_rdns', 'proxy_username',
+                          'proxy_password')
+            return dict(zip(proxy_keys, socks_default))
+
    def _create_socket_connection(self):
        # Open the underlying TCP connection to the broker, routing through
        # a proxy when one is configured or found in the environment.
        proxy = self._get_proxy()
        addr = (self._host, self._port)
        source = (self._bind_address, self._bind_port)
        # Python 2.6 and 3.0-3.1 lack the source_address parameter of
        # socket.create_connection().
        if sys.version_info < (2, 7) or (3, 0) < sys.version_info < (3, 2):
            return socket.create_connection(addr, timeout=(self._connect_timeout))
        if proxy:
            return (socks.create_connection)(addr, timeout=self._connect_timeout, source_address=source, **proxy)
        return socket.create_connection(addr, timeout=(self._connect_timeout), source_address=source)
+
+
class WebsocketWrapper(object):
    """Wraps a TCP/SSL socket in a minimal websocket layer so the MQTT
    client can speak MQTT-over-websockets.  Payload data travels in binary
    frames; PING and CLOSE control frames are answered inline.
    """
    OPCODE_CONTINUATION = 0
    OPCODE_TEXT = 1
    OPCODE_BINARY = 2
    OPCODE_CONNCLOSE = 8
    OPCODE_PING = 9
    OPCODE_PONG = 10

    def __init__(self, socket, host, port, is_ssl, path, extra_headers):
        self.connected = False
        self._ssl = is_ssl
        self._host = host
        self._port = port
        self._socket = socket
        self._path = path
        self._sendbuffer = bytearray()
        self._readbuffer = bytearray()
        self._requested_size = 0
        self._payload_head = 0
        self._readbuffer_head = 0
        self._do_handshake(extra_headers)

    def __del__(self):
        self._sendbuffer = None
        self._readbuffer = None

    def _do_handshake(self, extra_headers):
        """Perform the HTTP upgrade handshake.

        `extra_headers` may be a dict (merged into the defaults) or a
        callable (given the default headers, returns the headers to send).
        Raises WebsocketConnectionError when the server does not upgrade
        the connection or returns a bad Sec-WebSocket-Accept hash.
        """
        sec_websocket_key = uuid.uuid4().bytes
        sec_websocket_key = base64.b64encode(sec_websocket_key)
        websocket_headers = {'Host': "{self._host:s}:{self._port:d}".format(self=self),
                             'Upgrade': "websocket",
                             'Connection': "Upgrade",
                             'Origin': "https://{self._host:s}:{self._port:d}".format(self=self),
                             'Sec-WebSocket-Key': (sec_websocket_key.decode)("utf8"),
                             'Sec-Websocket-Version': "13",
                             'Sec-Websocket-Protocol': "mqtt"}
        # BUG FIX (decompiler artifact): the whole handshake had been folded
        # into the "neither dict nor callable" branch of this merge, so a
        # dict or callable extra_headers skipped the handshake entirely.
        if isinstance(extra_headers, dict):
            websocket_headers.update(extra_headers)
        elif callable(extra_headers):
            websocket_headers = extra_headers(websocket_headers)

        header = "\r\n".join([
            "GET {self._path} HTTP/1.1".format(self=self),
            "\r\n".join(("{}: {}".format(i, j) for i, j in websocket_headers.items())),
            "\r\n"]).encode("utf8")
        self._socket.send(header)

        has_secret = False
        has_upgrade = False
        while True:
            # Read the response byte-by-byte, accumulating one header line.
            byte = self._socket.recv(1)
            self._readbuffer.extend(byte)
            if byte == b'\n':
                if len(self._readbuffer) > 2:
                    # Non-empty header line: validate the interesting headers.
                    if b'connection' in str(self._readbuffer).lower().encode("utf-8"):
                        if b'upgrade' not in str(self._readbuffer).lower().encode("utf-8"):
                            raise WebsocketConnectionError("WebSocket handshake error, connection not upgraded")
                        else:
                            has_upgrade = True
                    if b'sec-websocket-accept' in str(self._readbuffer).lower().encode("utf-8"):
                        GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
                        server_hash = self._readbuffer.decode("utf-8").split(": ", 1)[1]
                        server_hash = server_hash.strip().encode("utf-8")
                        client_hash = sec_websocket_key.decode("utf-8") + GUID
                        client_hash = hashlib.sha1(client_hash.encode("utf-8"))
                        client_hash = base64.b64encode(client_hash.digest())
                        if server_hash != client_hash:
                            raise WebsocketConnectionError("WebSocket handshake error, invalid secret key")
                        else:
                            has_secret = True
                else:
                    # Blank line ends the response headers.
                    break
                self._readbuffer = bytearray()
            elif not byte:
                raise WebsocketConnectionError("WebSocket handshake error")

        # BUG FIX (decompiler artifact): this was `raise has_upgrade and
        # has_secret or WebsocketConnectionError(...)`, which raises `True`
        # (TypeError) on a *successful* handshake.
        if not (has_upgrade and has_secret):
            raise WebsocketConnectionError("WebSocket handshake error")

        self._readbuffer = bytearray()
        self.connected = True

    def _create_frame(self, opcode, data, do_masking=1):
        """Build a single websocket frame (FIN set) around `data`, a
        bytearray, masking the payload in place when do_masking is 1."""
        header = bytearray()
        length = len(data)
        mask_key = bytearray(os.urandom(4))
        mask_flag = do_masking
        header.append(128 | opcode)
        if length < 126:
            header.append(mask_flag << 7 | length)
        elif length < 65536:
            header.append(mask_flag << 7 | 126)
            header += struct.pack("!H", length)
        # BUG FIX: the bound below carried a Python-2 "L" long suffix, which
        # is a SyntaxError on Python 3; 2^63 matches the error message.
        elif length < 0x8000000000000000:
            header.append(mask_flag << 7 | 127)
            header += struct.pack("!Q", length)
        else:
            raise ValueError("Maximum payload size is 2^63")
        if mask_flag == 1:
            for index in range(length):
                data[index] ^= mask_key[index % 4]

            data = mask_key + data
        return header + data

    def _buffered_read(self, length):
        """Return `length` bytes from the read buffer, pulling the missing
        part from the socket; raises BlockingIOError on a short read."""
        wanted_bytes = length - (len(self._readbuffer) - self._readbuffer_head)
        if wanted_bytes > 0:
            data = self._socket.recv(wanted_bytes)
            if not data:
                raise ConnectionAbortedError
            else:
                self._readbuffer.extend(data)
            if len(data) < wanted_bytes:
                raise BlockingIOError
        self._readbuffer_head += length
        # BUG FIX (decompiler artifact): slice was `(head - length)[:head]`,
        # subscripting an int and raising TypeError at runtime.
        return self._readbuffer[self._readbuffer_head - length:self._readbuffer_head]

    def _recv_impl(self, length):
        """Decode up to `length` payload bytes from the next websocket
        frame; answers PING/CLOSE control frames and returns b'' once the
        connection drops."""
        try:
            self._readbuffer_head = 0
            result = None
            chunk_startindex = self._payload_head
            chunk_endindex = self._payload_head + length
            header1 = self._buffered_read(1)
            header2 = self._buffered_read(1)
            opcode = header1[0] & 15
            maskbit = header2[0] & 128 == 128
            lengthbits = header2[0] & 127
            payload_length = lengthbits
            mask_key = None
            # BUG FIX (decompiler artifact): the extended-length, mask and
            # payload handling below had been nested inside each other's
            # `else` branches, so most frames were never read at all; they
            # are independent sequential steps.
            if lengthbits == 126:
                value = self._buffered_read(2)
                payload_length, = struct.unpack("!H", value)
            elif lengthbits == 127:
                value = self._buffered_read(8)
                payload_length, = struct.unpack("!Q", value)
            if maskbit:
                mask_key = self._buffered_read(4)
            # If the frame payload is shorter than requested, read only the
            # available part.
            readindex = chunk_endindex
            if payload_length < readindex:
                readindex = payload_length
            if readindex > 0:
                payload = self._buffered_read(readindex)
                if maskbit:
                    for index in range(chunk_startindex, readindex):
                        payload[index] ^= mask_key[index % 4]

                result = payload[chunk_startindex:readindex]
                self._payload_head = readindex
            else:
                payload = bytearray()
            # Frame fully consumed: reset buffers and answer control frames.
            if readindex == payload_length:
                self._readbuffer = bytearray()
                self._payload_head = 0
                if opcode == WebsocketWrapper.OPCODE_CONNCLOSE:
                    frame = self._create_frame(WebsocketWrapper.OPCODE_CONNCLOSE, payload, 0)
                    self._socket.send(frame)
                if opcode == WebsocketWrapper.OPCODE_PING:
                    frame = self._create_frame(WebsocketWrapper.OPCODE_PONG, payload, 0)
                    self._socket.send(frame)
            if opcode == WebsocketWrapper.OPCODE_BINARY or opcode == WebsocketWrapper.OPCODE_CONTINUATION:
                if payload_length > 0:
                    return result
            raise BlockingIOError
        except ConnectionError:
            self.connected = False
            return b''

    def _send_impl(self, data):
        """Frame `data` (on first call) and push the pending frame bytes to
        the socket; returns the accepted size once fully flushed, else 0."""
        if len(self._sendbuffer) == 0:
            frame = self._create_frame(WebsocketWrapper.OPCODE_BINARY, bytearray(data))
            self._sendbuffer.extend(frame)
            self._requested_size = len(data)
        length = self._socket.send(self._sendbuffer)
        # BUG FIX (decompiler artifact): slice was `[length[:None]]`.
        self._sendbuffer = self._sendbuffer[length:]
        if len(self._sendbuffer) == 0:
            return self._requested_size
        return 0

    def recv(self, length):
        return self._recv_impl(length)

    def read(self, length):
        return self._recv_impl(length)

    def send(self, data):
        return self._send_impl(data)

    def write(self, data):
        return self._send_impl(data)

    def close(self):
        self._socket.close()

    def fileno(self):
        return self._socket.fileno()

    def pending(self):
        # Only SSL sockets buffer decrypted-but-unread bytes.
        if self._ssl:
            return self._socket.pending()
        return 0

    def setblocking(self, flag):
        self._socket.setblocking(flag)
\ No newline at end of file
diff --git a/APPS_UNCOMPILED/lib/paho/mqtt/matcher.py b/APPS_UNCOMPILED/lib/paho/mqtt/matcher.py
new file mode 100644
index 0000000..df68a69
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/paho/mqtt/matcher.py
@@ -0,0 +1,91 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/paho/mqtt/matcher.py
+# Compiled at: 2024-04-18 03:12:55
+# Size of source mod 2**32: 2771 bytes
+
+
class MQTTMatcher(object):
    __doc__ = "Intended to manage topic filters including wildcards.\n\n    Internally, MQTTMatcher use a prefix tree (trie) to store\n    values associated with filters, and has an iter_match()\n    method to iterate efficiently over all filters that match\n    some topic name."

    class Node(object):
        # One trie node per topic level; _content holds the stored value
        # for filters ending at this node (None means "no filter here").
        __slots__ = ('_children', '_content')

        def __init__(self):
            self._children = {}
            self._content = None

    def __init__(self):
        self._root = self.Node()

    def __setitem__(self, key, value):
        """Add a topic filter :key to the prefix tree
        and associate it to :value"""
        node = self._root
        for sym in key.split("/"):
            node = node._children.setdefault(sym, self.Node())

        node._content = value

    def __getitem__(self, key):
        """Retrieve the value associated with some topic filter :key"""
        try:
            node = self._root
            for sym in key.split("/"):
                node = node._children[sym]

            if node._content is None:
                raise KeyError(key)
            return node._content
        except KeyError:
            raise KeyError(key)

    def __delitem__(self, key):
        """Delete the value associated with some topic filter :key"""
        lst = []
        try:
            parent, node = None, self._root
            for k in key.split("/"):
                parent, node = node, node._children[k]
                lst.append((parent, k, node))

            node._content = None
        except KeyError:
            raise KeyError(key)
        else:
            # Prune now-empty trie branches bottom-up.
            for parent, k, node in reversed(lst):
                if not node._children:
                    if node._content is not None:
                        break
                    del parent._children[k]

    def iter_match(self, topic):
        """Return an iterator on all values associated with filters
        that match the :topic"""
        lst = topic.split("/")
        # Topics starting with '$' (e.g. $SYS/...) must not be matched by
        # wildcards at the first level (MQTT-4.7.2-1).
        normal = not topic.startswith("$")

        # BUG FIX (decompiler artifacts): the recursive walk below had its
        # `else` branch fused into the inner `if` and the '#' condition
        # inverted to `not normal`, so matching was completely broken.
        def rec(node, i=0):
            if i == len(lst):
                # Consumed the whole topic: yield an exact-filter value.
                if node._content is not None:
                    yield node._content
            else:
                part = lst[i]
                if part in node._children:
                    for content in rec(node._children[part], i + 1):
                        yield content

                if "+" in node._children and (normal or i > 0):
                    for content in rec(node._children["+"], i + 1):
                        yield content

            # '#' matches the remainder of the topic (including zero levels).
            if "#" in node._children and (normal or i > 0):
                content = node._children["#"]._content
                if content is not None:
                    yield content

        return rec(self._root)
diff --git a/APPS_UNCOMPILED/lib/paho/mqtt/packettypes.py b/APPS_UNCOMPILED/lib/paho/mqtt/packettypes.py
new file mode 100644
index 0000000..db5cb1b
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/paho/mqtt/packettypes.py
@@ -0,0 +1,38 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/paho/mqtt/packettypes.py
+# Compiled at: 2024-04-18 03:12:55
+# Size of source mod 2**32: 1453 bytes
+"""
+*******************************************************************
+  Copyright (c) 2017, 2019 IBM Corp.
+
+  All rights reserved. This program and the accompanying materials
+  are made available under the terms of the Eclipse Public License v2.0
+  and Eclipse Distribution License v1.0 which accompany this distribution.
+
+  The Eclipse Public License is available at
+     http://www.eclipse.org/legal/epl-v10.html
+  and the Eclipse Distribution License is available at
+    http://www.eclipse.org/org/documents/edl-v10.php.
+
+  Contributors:
+     Ian Craggs - initial implementation and/or documentation
+*******************************************************************
+"""
+
class PacketTypes:
    """
    Packet types class.  Includes the AUTH packet for MQTT v5.0.

    Holds constants for each packet type such as PacketTypes.PUBLISH
    and packet name strings: PacketTypes.Names[PacketTypes.PUBLISH].

    """
    indexes = range(1, 16)
    # Unpack the fifteen packet-type codes (1..15) in protocol order.
    (CONNECT, CONNACK, PUBLISH, PUBACK, PUBREC,
     PUBREL, PUBCOMP, SUBSCRIBE, SUBACK, UNSUBSCRIBE,
     UNSUBACK, PINGREQ, PINGRESP, DISCONNECT, AUTH) = indexes
    # Pseudo packet type used internally for will-message properties.
    WILLMESSAGE = 99
    # Display names, indexed by packet-type code (0 is reserved).
    Names = ['reserved',
             'Connect', 'Connack', 'Publish', 'Puback',
             'Pubrec', 'Pubrel', 'Pubcomp', 'Subscribe',
             'Suback', 'Unsubscribe', 'Unsuback', 'Pingreq',
             'Pingresp', 'Disconnect', 'Auth']
diff --git a/APPS_UNCOMPILED/lib/paho/mqtt/properties.py b/APPS_UNCOMPILED/lib/paho/mqtt/properties.py
new file mode 100644
index 0000000..3a1a86c
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/paho/mqtt/properties.py
@@ -0,0 +1,455 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/paho/mqtt/properties.py
+# Compiled at: 2024-04-18 03:12:55
+# Size of source mod 2**32: 17815 bytes
+"""
+*******************************************************************
+  Copyright (c) 2017, 2019 IBM Corp.
+
+  All rights reserved. This program and the accompanying materials
+  are made available under the terms of the Eclipse Public License v2.0
+  and Eclipse Distribution License v1.0 which accompany this distribution.
+
+  The Eclipse Public License is available at
+     http://www.eclipse.org/legal/epl-v10.html
+  and the Eclipse Distribution License is available at
+    http://www.eclipse.org/org/documents/edl-v10.php.
+
+  Contributors:
+     Ian Craggs - initial implementation and/or documentation
+*******************************************************************
+"""
+import struct, sys
+from .packettypes import PacketTypes
+
class MQTTException(Exception):
    """Base exception for MQTT v5.0 property and packet handling errors."""
    pass
+
+
class MalformedPacket(MQTTException):
    """Raised when incoming packet data violates the MQTT encoding rules."""
    pass
+
+
def writeInt16(length):
    """Pack `length` as a big-endian unsigned 16-bit integer (bytearray)."""
    packed = struct.pack("!H", length)
    return bytearray(packed)
+
+
def readInt16(buf):
    """Read a big-endian unsigned 16-bit integer from the head of `buf`."""
    # BUG FIX (decompiler artifact): slice was `buf[None[:2]]`, which
    # subscripts None and raises TypeError at runtime.
    return struct.unpack("!H", buf[:2])[0]
+
+
def writeInt32(length):
    """Pack `length` as a big-endian unsigned 32-bit integer (bytearray)."""
    packed = struct.pack("!L", length)
    return bytearray(packed)
+
+
def readInt32(buf):
    """Read a big-endian unsigned 32-bit integer from the head of `buf`."""
    # BUG FIX (decompiler artifact): slice was `buf[None[:4]]`, which
    # subscripts None and raises TypeError at runtime.
    return struct.unpack("!L", buf[:4])[0]
+
+
def writeUTF(data):
    """UTF-8 encode `data` (unless it is already bytes) and prefix it with
    its 2-byte big-endian length."""
    if sys.version_info[0] < 3:
        encoded = bytearray(data, "utf-8")
    else:
        encoded = data if type(data) == type(b'') else bytes(data, "utf-8")
    return writeInt16(len(encoded)) + encoded
+
+
def readUTF(buffer, maxlen):
    """Read a length-prefixed UTF-8 string from `buffer`.

    Returns (string, bytes_consumed).  Raises MalformedPacket when the
    length prefix overruns `maxlen` or the data contains code points
    forbidden by MQTT-1.5.4 (surrogates, NUL, U+FEFF).
    """
    if maxlen >= 2:
        length = readInt16(buffer)
    else:
        raise MalformedPacket("Not enough data to read string length")
    maxlen -= 2
    if length > maxlen:
        raise MalformedPacket("Length delimited string too long")
    # BUG FIX (decompiler artifact): slice was `buffer[2[:2 + length]]`,
    # which subscripts an int and raises TypeError at runtime.
    buf = buffer[2:2 + length].decode("utf-8")
    for c in buf:
        ord_c = ord(c)
        if ord_c >= 55296:
            if ord_c <= 57343:
                raise MalformedPacket("[MQTT-1.5.4-1] D800-DFFF found in UTF-8 data")
        if ord_c == 0:
            raise MalformedPacket("[MQTT-1.5.4-2] Null found in UTF-8 data")
        if ord_c == 65279:
            raise MalformedPacket("[MQTT-1.5.4-3] U+FEFF in UTF-8 data")

    return (
     buf, length + 2)
+
+
def writeBytes(buffer):
    """Prefix `buffer` with its 2-byte big-endian length."""
    length_prefix = writeInt16(len(buffer))
    return length_prefix + buffer
+
+
def readBytes(buffer):
    """Read a length-prefixed binary blob from `buffer`.

    Returns (data, bytes_consumed).
    """
    length = readInt16(buffer)
    # BUG FIX (decompiler artifact): slice was `buffer[2[:2 + length]]`,
    # which subscripts an int and raises TypeError at runtime.
    return (buffer[2:2 + length], length + 2)
+
+
class VariableByteIntegers:
    __doc__ = "\n    MQTT variable byte integer helper class.  Used\n    in several places in MQTT v5.0 properties.\n\n    "

    @staticmethod
    def encode(x):
        """
          Convert an integer 0 <= x <= 268435455 into multi-byte format.
          Returns the buffer converted from the integer.
        """
        assert 0 <= x <= 268435455
        buffer = b''
        while True:
            digit = x % 128
            x //= 128
            if x > 0:
                digit |= 128
            # BUG FIX (decompiler artifact): this append was chained as an
            # `elif` of the continuation-bit check above, so every byte with
            # the continuation bit set was silently dropped, corrupting all
            # multi-byte encodings.
            if sys.version_info[0] >= 3:
                buffer += bytes([digit])
            else:
                buffer += bytes(chr(digit))
            if x == 0:
                break

        return buffer

    @staticmethod
    def decode(buffer):
        """
          Get the value of a multi-byte integer from a buffer
          Return the value, and the number of bytes used.

          [MQTT-1.5.5-1] the encoded value MUST use the minimum number of bytes necessary to represent the value
        """
        multiplier = 1
        value = 0
        bytes_used = 0  # renamed from `bytes`, which shadowed the builtin
        while True:
            bytes_used += 1
            digit = buffer[0]
            # BUG FIX (decompiler artifact): slice was `buffer[1[:None]]`,
            # which subscripts an int and raises TypeError at runtime.
            buffer = buffer[1:]
            value += (digit & 127) * multiplier
            if digit & 128 == 0:
                break
            multiplier *= 128

        return (
         value, bytes_used)
+
+
+class Properties(object):
+    __doc__ = 'MQTT v5.0 properties class.\n\n    See Properties.names for a list of accepted property names along with their numeric values.\n\n    See Properties.properties for the data type of each property.\n\n    Example of use:\n\n        publish_properties = Properties(PacketTypes.PUBLISH)\n        publish_properties.UserProperty = ("a", "2")\n        publish_properties.UserProperty = ("c", "3")\n\n    First the object is created with packet type as argument, no properties will be present at\n    this point.  Then properties are added as attributes, the name of which is the string property\n    name without the spaces.\n\n    '
+
+    def __init__(self, packetType):
+        self.packetType = packetType
+        self.types = ['Byte', 'Two Byte Integer', 'Four Byte Integer', 'Variable Byte Integer', 
+         'Binary Data', 
+         'UTF-8 Encoded String', 'UTF-8 String Pair']
+        self.names = {
+         'Payload Format Indicator': 1, 
+         'Message Expiry Interval': 2, 
+         'Content Type': 3, 
+         'Response Topic': 8, 
+         'Correlation Data': 9, 
+         'Subscription Identifier': 11, 
+         'Session Expiry Interval': 17, 
+         'Assigned Client Identifier': 18, 
+         'Server Keep Alive': 19, 
+         'Authentication Method': 21, 
+         'Authentication Data': 22, 
+         'Request Problem Information': 23, 
+         'Will Delay Interval': 24, 
+         'Request Response Information': 25, 
+         'Response Information': 26, 
+         'Server Reference': 28, 
+         'Reason String': 31, 
+         'Receive Maximum': 33, 
+         'Topic Alias Maximum': 34, 
+         'Topic Alias': 35, 
+         'Maximum QoS': 36, 
+         'Retain Available': 37, 
+         'User Property': 38, 
+         'Maximum Packet Size': 39, 
+         'Wildcard Subscription Available': 40, 
+         'Subscription Identifier Available': 41, 
+         'Shared Subscription Available': 42}
+        self.properties = {1:(
+          self.types.index("Byte"), [PacketTypes.PUBLISH, PacketTypes.WILLMESSAGE]), 
+         2:(
+          self.types.index("Four Byte Integer"), [PacketTypes.PUBLISH, PacketTypes.WILLMESSAGE]), 
+         3:(
+          self.types.index("UTF-8 Encoded String"), [PacketTypes.PUBLISH, PacketTypes.WILLMESSAGE]), 
+         8:(
+          self.types.index("UTF-8 Encoded String"), [PacketTypes.PUBLISH, PacketTypes.WILLMESSAGE]), 
+         9:(
+          self.types.index("Binary Data"), [PacketTypes.PUBLISH, PacketTypes.WILLMESSAGE]), 
+         11:(
+          self.types.index("Variable Byte Integer"),
+          [
+           PacketTypes.PUBLISH, PacketTypes.SUBSCRIBE]), 
+         17:(
+          self.types.index("Four Byte Integer"),
+          [
+           PacketTypes.CONNECT, PacketTypes.CONNACK, PacketTypes.DISCONNECT]), 
+         18:(
+          self.types.index("UTF-8 Encoded String"), [PacketTypes.CONNACK]), 
+         19:(
+          self.types.index("Two Byte Integer"), [PacketTypes.CONNACK]), 
+         21:(
+          self.types.index("UTF-8 Encoded String"),
+          [
+           PacketTypes.CONNECT, PacketTypes.CONNACK, PacketTypes.AUTH]), 
+         22:(
+          self.types.index("Binary Data"),
+          [
+           PacketTypes.CONNECT, PacketTypes.CONNACK, PacketTypes.AUTH]), 
+         23:(
+          self.types.index("Byte"),
+          [
+           PacketTypes.CONNECT]), 
+         24:(
+          self.types.index("Four Byte Integer"), [PacketTypes.WILLMESSAGE]), 
+         25:(
+          self.types.index("Byte"), [PacketTypes.CONNECT]), 
+         26:(
+          self.types.index("UTF-8 Encoded String"), [PacketTypes.CONNACK]), 
+         28:(
+          self.types.index("UTF-8 Encoded String"),
+          [
+           PacketTypes.CONNACK, PacketTypes.DISCONNECT]), 
+         31:(
+          self.types.index("UTF-8 Encoded String"),
+          [
+           PacketTypes.CONNACK, PacketTypes.PUBACK, PacketTypes.PUBREC,
+           PacketTypes.PUBREL, PacketTypes.PUBCOMP, PacketTypes.SUBACK,
+           PacketTypes.UNSUBACK, PacketTypes.DISCONNECT, PacketTypes.AUTH]), 
+         33:(
+          self.types.index("Two Byte Integer"),
+          [
+           PacketTypes.CONNECT, PacketTypes.CONNACK]), 
+         34:(
+          self.types.index("Two Byte Integer"),
+          [
+           PacketTypes.CONNECT, PacketTypes.CONNACK]), 
+         35:(
+          self.types.index("Two Byte Integer"), [PacketTypes.PUBLISH]), 
+         36:(
+          self.types.index("Byte"), [PacketTypes.CONNACK]), 
+         37:(
+          self.types.index("Byte"), [PacketTypes.CONNACK]), 
+         38:(
+          self.types.index("UTF-8 String Pair"),
+          [
+           PacketTypes.CONNECT, PacketTypes.CONNACK,
+           PacketTypes.PUBLISH, PacketTypes.PUBACK,
+           PacketTypes.PUBREC, PacketTypes.PUBREL, PacketTypes.PUBCOMP,
+           PacketTypes.SUBSCRIBE, PacketTypes.SUBACK,
+           PacketTypes.UNSUBSCRIBE, PacketTypes.UNSUBACK,
+           PacketTypes.DISCONNECT, PacketTypes.AUTH, PacketTypes.WILLMESSAGE]), 
+         39:(
+          self.types.index("Four Byte Integer"),
+          [
+           PacketTypes.CONNECT, PacketTypes.CONNACK]), 
+         40:(
+          self.types.index("Byte"), [PacketTypes.CONNACK]), 
+         41:(
+          self.types.index("Byte"), [PacketTypes.CONNACK]), 
+         42:(
+          self.types.index("Byte"), [PacketTypes.CONNACK])}
+
+    def allowsMultiple(self, compressedName):
+        return self.getIdentFromName(compressedName) in (11, 38)
+
+    def getIdentFromName(self, compressedName):
+        result = -1
+        for name in self.names.keys():
+            if compressedName == name.replace(" ", ""):
+                result = self.names[name]
+                break
+
+        return result
+
+    def __setattr__(self, name, value):
+        name = name.replace(" ", "")
+        privateVars = ["packetType", "types", "names", "properties"]
+        if name in privateVars:
+            object.__setattr__(self, name, value)
+        else:
+            if name not in [aname.replace(" ", "") for aname in self.names.keys()]:
+                raise MQTTException("Property name must be one of " + str(self.names.keys()))
+            if self.packetType not in self.properties[self.getIdentFromName(name)][1]:
+                raise MQTTException("Property %s does not apply to packet type %s" % (
+                 name, PacketTypes.Names[self.packetType]))
+            if not type(value) != type([]) or name in ('ReceiveMaximum', 'TopicAlias') and (value < 1 or value > 65535):
+                raise MQTTException("%s property value must be in the range 1-65535" % name)
+            else:
+                if not name in ('TopicAliasMaximum', ) or value < 0 or value > 65535:
+                    raise MQTTException("%s property value must be in the range 0-65535" % name)
+                else:
+                    if not name in ('MaximumPacketSize', 'SubscriptionIdentifier') or value < 1 or value > 268435455:
+                        raise MQTTException("%s property value must be in the range 1-268435455" % name)
+                    else:
+                        if name in ('RequestResponseInformation', 'RequestProblemInformation',
+                                    'PayloadFormatIndicator'):
+                            if value != 0:
+                                if value != 1:
+                                    raise MQTTException("%s property value must be 0 or 1" % name)
+                        if self.allowsMultiple(name):
+                            if type(value) != type([]):
+                                value = [
+                                 value]
+                            if hasattr(self, name):
+                                value = object.__getattribute__(self, name) + value
+                        object.__setattr__(self, name, value)
+
+    def __str__(self):
+        buffer = "["
+        first = True
+        for name in self.names.keys():
+            compressedName = name.replace(" ", "")
+            if hasattr(self, compressedName):
+                if not first:
+                    buffer += ", "
+                buffer += compressedName + " : " + str(getattr(self, compressedName))
+                first = False
+
+        buffer += "]"
+        return buffer
+
+    def json(self):
+        data = {}
+        for name in self.names.keys():
+            compressedName = name.replace(" ", "")
+            if hasattr(self, compressedName):
+                val = getattr(self, compressedName)
+                if compressedName == "CorrelationData":
+                    if isinstance(val, bytes):
+                        data[compressedName] = val.hex()
+                data[compressedName] = val
+
+        return data
+
+    def isEmpty(self):
+        rc = True
+        for name in self.names.keys():
+            compressedName = name.replace(" ", "")
+            if hasattr(self, compressedName):
+                rc = False
+                break
+
+        return rc
+
+    def clear(self):
+        for name in self.names.keys():
+            compressedName = name.replace(" ", "")
+            if hasattr(self, compressedName):
+                delattr(self, compressedName)
+
+    def writeProperty(self, identifier, type, value):
+        buffer = b''
+        buffer += VariableByteIntegers.encode(identifier)
+        if type == self.types.index("Byte"):
+            if sys.version_info[0] < 3:
+                buffer += chr(value)
+            else:
+                buffer += bytes([value])
+        else:
+            if type == self.types.index("Two Byte Integer"):
+                buffer += writeInt16(value)
+            else:
+                if type == self.types.index("Four Byte Integer"):
+                    buffer += writeInt32(value)
+                else:
+                    if type == self.types.index("Variable Byte Integer"):
+                        buffer += VariableByteIntegers.encode(value)
+                    else:
+                        if type == self.types.index("Binary Data"):
+                            buffer += writeBytes(value)
+                        else:
+                            if type == self.types.index("UTF-8 Encoded String"):
+                                buffer += writeUTF(value)
+                            else:
+                                if type == self.types.index("UTF-8 String Pair"):
+                                    buffer += writeUTF(value[0]) + writeUTF(value[1])
+                                return buffer
+
+    def pack(self):
+        buffer = b''
+        for name in self.names.keys():
+            compressedName = name.replace(" ", "")
+            if hasattr(self, compressedName):
+                identifier = self.getIdentFromName(compressedName)
+                attr_type = self.properties[identifier][0]
+                if self.allowsMultiple(compressedName):
+                    for prop in getattr(self, compressedName):
+                        buffer += self.writeProperty(identifier, attr_type, prop)
+
+                else:
+                    buffer += self.writeProperty(identifier, attr_type, getattr(self, compressedName))
+
+        return VariableByteIntegers.encode(len(buffer)) + buffer
+
+    def readProperty(self, buffer, type, propslen):
+        if type == self.types.index("Byte"):
+            value = buffer[0]
+            valuelen = 1
+        else:
+            if type == self.types.index("Two Byte Integer"):
+                value = readInt16(buffer)
+                valuelen = 2
+            else:
+                if type == self.types.index("Four Byte Integer"):
+                    value = readInt32(buffer)
+                    valuelen = 4
+                else:
+                    if type == self.types.index("Variable Byte Integer"):
+                        value, valuelen = VariableByteIntegers.decode(buffer)
+                    else:
+                        if type == self.types.index("Binary Data"):
+                            value, valuelen = readBytes(buffer)
+                        else:
+                            if type == self.types.index("UTF-8 Encoded String"):
+                                value, valuelen = readUTF(buffer, propslen)
+                            else:
+                                if type == self.types.index("UTF-8 String Pair"):
+                                    value, valuelen = readUTF(buffer, propslen)
+                                    buffer = buffer[valuelen[:None]]
+                                    value1, valuelen1 = readUTF(buffer, propslen - valuelen)
+                                    value = (value, value1)
+                                    valuelen += valuelen1
+                                return (
+                                 value, valuelen)
+
+    def getNameFromIdent(self, identifier):
+        rc = None
+        for name in self.names:
+            if self.names[name] == identifier:
+                rc = name
+
+        return rc
+
+    def unpack(self, buffer):
+        if sys.version_info[0] < 3:
+            buffer = bytearray(buffer)
+        self.clear()
+        propslen, VBIlen = VariableByteIntegers.decode(buffer)
+        buffer = buffer[VBIlen[:None]]
+        propslenleft = propslen
+        while propslenleft > 0:
+            identifier, VBIlen2 = VariableByteIntegers.decode(buffer)
+            buffer = buffer[VBIlen2[:None]]
+            propslenleft -= VBIlen2
+            attr_type = self.properties[identifier][0]
+            value, valuelen = self.readProperty(buffer, attr_type, propslenleft)
+            buffer = buffer[valuelen[:None]]
+            propslenleft -= valuelen
+            propname = self.getNameFromIdent(identifier)
+            compressedName = propname.replace(" ", "")
+            if not self.allowsMultiple(compressedName):
+                if hasattr(self, compressedName):
+                    raise MQTTException("Property '%s' must not exist more than once" % property)
+            setattr(self, propname, value)
+
+        return (
+         self, propslen + VBIlen)
diff --git a/APPS_UNCOMPILED/lib/paho/mqtt/publish.py b/APPS_UNCOMPILED/lib/paho/mqtt/publish.py
new file mode 100644
index 0000000..67aae67
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/paho/mqtt/publish.py
@@ -0,0 +1,210 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/paho/mqtt/publish.py
+# Compiled at: 2024-04-18 03:12:55
+# Size of source mod 2**32: 9624 bytes
+"""
+This module provides some helper functions to allow straightforward publishing
+of messages in a one-shot manner. In other words, they are useful for the
+situation where you have a single/multiple messages you want to publish to a
+broker, then disconnect and nothing else is required.
+"""
+from __future__ import absolute_import
+import collections
+try:
+    from collections.abc import Iterable
+except ImportError:
+    from collections import Iterable
+
+from .. import mqtt
+from . import client as paho
+
def _do_publish(client):
    """Internal function: pop the next queued message and publish it.

    The queue lives in client._userdata (a deque).  A dict is expanded as
    keyword arguments to publish(); a tuple/list as positional arguments.
    """
    message = client._userdata.popleft()
    if isinstance(message, dict):
        client.publish(**message)
    elif isinstance(message, (tuple, list)):
        client.publish(*message)
    else:
        raise TypeError("message must be a dict, tuple, or list")
+
+
def _on_connect(client, userdata, flags, rc):
    """Internal callback: start publishing once the connection succeeds."""
    if rc != 0:
        raise mqtt.MQTTException(paho.connack_string(rc))
    if len(userdata) > 0:
        _do_publish(client)
+
+
def _on_connect_v5(client, userdata, flags, rc, properties):
    """Internal v5 callback; delegates to the v3.x connect handler."""
    _on_connect(client, userdata, flags, rc)
+
+
def _on_publish(client, userdata, mid):
    """Internal callback: publish the next queued message, or disconnect when done."""
    if userdata:
        _do_publish(client)
    else:
        client.disconnect()
+
+
def multiple(msgs, hostname="localhost", port=1883, client_id="", keepalive=60, will=None, auth=None, tls=None, protocol=paho.MQTTv311, transport="tcp", proxy_args=None):
    """Publish a list of messages to a broker, then disconnect cleanly.

    Creates an MQTT client, connects to a broker, publishes every message in
    msgs, then disconnects once all have been delivered.

    msgs : iterable of messages.  Each message is either
           - a dict {'topic': "<topic>", 'payload': "<payload>", 'qos': <qos>,
             'retain': <retain>} where only 'topic' is required (payload
             defaults to a zero-length payload, qos to 0, retain to False), or
           - a tuple ("<topic>", "<payload>", qos, retain).
    hostname : broker address (default "localhost").
    port : broker port (default 1883).
    client_id : MQTT client id; "" or None lets the library generate one.
    keepalive : keepalive timeout in seconds (default 60).
    will : optional will dict {'topic': "<topic>", 'payload': "<payload>",
           'qos': <qos>, 'retain': <retain>}; only 'topic' is required.
    auth : optional auth dict {'username': "<u>", 'password': "<p>"};
           'username' is required.
    tls : optional TLS configuration — either a dict {'ca_certs': "<ca>",
          'certfile': "<cert>", 'keyfile': "<key>", 'tls_version': "<v>",
          'ciphers': "<c>", 'insecure': "<bool>"} (only 'ca_certs' required)
          or an SSLContext object (handled via tls_set_context).
    protocol : paho protocol version (default paho.MQTTv311).
    transport : "tcp" (default) or "websockets".
    proxy_args : optional dict passed to client.proxy_set().
    """
    if not isinstance(msgs, Iterable):
        raise TypeError("msgs must be an iterable")

    client = paho.Client(client_id=client_id,
                         userdata=collections.deque(msgs),
                         protocol=protocol,
                         transport=transport)
    client.on_publish = _on_publish
    # MQTT v5 uses a callback with an extra `properties` argument.
    client.on_connect = _on_connect_v5 if protocol == mqtt.client.MQTTv5 else _on_connect

    if proxy_args is not None:
        client.proxy_set(**proxy_args)

    if auth:
        username = auth.get("username")
        if not username:
            raise KeyError("The 'username' key was not found, this is required for auth")
        client.username_pw_set(username, auth.get("password"))

    if will is not None:
        client.will_set(**will)

    if tls is not None:
        if isinstance(tls, dict):
            # 'insecure' is not a tls_set() argument — pop it out first.
            insecure = tls.pop("insecure", False)
            client.tls_set(**tls)
            if insecure:
                client.tls_insecure_set(insecure)
        else:
            # Assume an SSLContext.
            client.tls_set_context(tls)

    client.connect(hostname, port, keepalive)
    client.loop_forever()
+
+
def single(topic, payload=None, qos=0, retain=False, hostname="localhost", port=1883, client_id="", keepalive=60, will=None, auth=None, tls=None, protocol=paho.MQTTv311, transport="tcp", proxy_args=None):
    """Publish a single message to a broker, then disconnect cleanly.

    Convenience wrapper around multiple() for the one-message case.

    topic : topic string to publish to (required).
    payload : payload to publish; "" or None publishes a zero-length payload.
    qos : qos to publish at (default 0).
    retain : whether the message should be retained (default False).

    The remaining parameters (hostname, port, client_id, keepalive, will,
    auth, tls, protocol, transport, proxy_args) are passed through to
    multiple() unchanged — see its docstring for details.
    """
    message = {"topic": topic, "payload": payload, "qos": qos, "retain": retain}
    multiple([message], hostname, port, client_id, keepalive,
             will, auth, tls, protocol, transport, proxy_args)
diff --git a/APPS_UNCOMPILED/lib/paho/mqtt/reasoncodes.py b/APPS_UNCOMPILED/lib/paho/mqtt/reasoncodes.py
new file mode 100644
index 0000000..46075b8
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/paho/mqtt/reasoncodes.py
@@ -0,0 +1,193 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/paho/mqtt/reasoncodes.py
+# Compiled at: 2024-04-18 03:12:55
+# Size of source mod 2**32: 8591 bytes
+"""
+*******************************************************************
+  Copyright (c) 2017, 2019 IBM Corp.
+
+  All rights reserved. This program and the accompanying materials
+  are made available under the terms of the Eclipse Public License v2.0
+  and Eclipse Distribution License v1.0 which accompany this distribution.
+
+  The Eclipse Public License is available at
+     http://www.eclipse.org/legal/epl-v10.html
+  and the Eclipse Distribution License is available at
+    http://www.eclipse.org/org/documents/edl-v10.php.
+
+  Contributors:
+     Ian Craggs - initial implementation and/or documentation
+*******************************************************************
+"""
+import sys
+from .packettypes import PacketTypes
+
class ReasonCodes:
    """MQTT version 5.0 reason codes class.

    See ReasonCodes.names for a list of possible numeric values along with their
    names and the packets to which they apply.
    """

    def __init__(self, packetType, aName='Success', identifier=-1):
        """
        packetType: the type of the packet, such as PacketTypes.CONNECT that
            this reason code will be used with.  Some reason codes have different
            names for the same identifier when used a different packet type.

        aName: the String name of the reason code to be created.  Ignored
            if the identifier is set.

        identifier: an integer value of the reason code to be created.

        """
        self.packetType = packetType
        # Maps numeric reason code -> {name: [packet types it applies to]}.
        # The same identifier can carry different names on different packets.
        self.names = {
            0: {"Success": [PacketTypes.CONNACK, PacketTypes.PUBACK,
                            PacketTypes.PUBREC, PacketTypes.PUBREL, PacketTypes.PUBCOMP,
                            PacketTypes.UNSUBACK, PacketTypes.AUTH],
                "Normal disconnection": [PacketTypes.DISCONNECT],
                "Granted QoS 0": [PacketTypes.SUBACK]},
            1: {"Granted QoS 1": [PacketTypes.SUBACK]},
            2: {"Granted QoS 2": [PacketTypes.SUBACK]},
            4: {"Disconnect with will message": [PacketTypes.DISCONNECT]},
            16: {"No matching subscribers": [PacketTypes.PUBACK, PacketTypes.PUBREC]},
            17: {"No subscription found": [PacketTypes.UNSUBACK]},
            24: {"Continue authentication": [PacketTypes.AUTH]},
            25: {"Re-authenticate": [PacketTypes.AUTH]},
            128: {"Unspecified error": [PacketTypes.CONNACK, PacketTypes.PUBACK,
                                        PacketTypes.PUBREC, PacketTypes.SUBACK,
                                        PacketTypes.UNSUBACK, PacketTypes.DISCONNECT]},
            129: {"Malformed packet": [PacketTypes.CONNACK, PacketTypes.DISCONNECT]},
            130: {"Protocol error": [PacketTypes.CONNACK, PacketTypes.DISCONNECT]},
            131: {"Implementation specific error": [PacketTypes.CONNACK, PacketTypes.PUBACK,
                                                    PacketTypes.PUBREC, PacketTypes.SUBACK,
                                                    PacketTypes.UNSUBACK, PacketTypes.DISCONNECT]},
            132: {"Unsupported protocol version": [PacketTypes.CONNACK]},
            133: {"Client identifier not valid": [PacketTypes.CONNACK]},
            134: {"Bad user name or password": [PacketTypes.CONNACK]},
            135: {"Not authorized": [PacketTypes.CONNACK, PacketTypes.PUBACK,
                                     PacketTypes.PUBREC, PacketTypes.SUBACK,
                                     PacketTypes.UNSUBACK, PacketTypes.DISCONNECT]},
            136: {"Server unavailable": [PacketTypes.CONNACK]},
            137: {"Server busy": [PacketTypes.CONNACK, PacketTypes.DISCONNECT]},
            138: {"Banned": [PacketTypes.CONNACK]},
            139: {"Server shutting down": [PacketTypes.DISCONNECT]},
            140: {"Bad authentication method": [PacketTypes.CONNACK, PacketTypes.DISCONNECT]},
            141: {"Keep alive timeout": [PacketTypes.DISCONNECT]},
            142: {"Session taken over": [PacketTypes.DISCONNECT]},
            143: {"Topic filter invalid": [PacketTypes.SUBACK, PacketTypes.UNSUBACK,
                                           PacketTypes.DISCONNECT]},
            144: {"Topic name invalid": [PacketTypes.CONNACK, PacketTypes.PUBACK,
                                         PacketTypes.PUBREC, PacketTypes.DISCONNECT]},
            145: {"Packet identifier in use": [PacketTypes.PUBACK, PacketTypes.PUBREC,
                                               PacketTypes.SUBACK, PacketTypes.UNSUBACK]},
            146: {"Packet identifier not found": [PacketTypes.PUBREL, PacketTypes.PUBCOMP]},
            147: {"Receive maximum exceeded": [PacketTypes.DISCONNECT]},
            148: {"Topic alias invalid": [PacketTypes.DISCONNECT]},
            149: {"Packet too large": [PacketTypes.CONNACK, PacketTypes.DISCONNECT]},
            150: {"Message rate too high": [PacketTypes.DISCONNECT]},
            151: {"Quota exceeded": [PacketTypes.CONNACK, PacketTypes.PUBACK,
                                     PacketTypes.PUBREC, PacketTypes.SUBACK,
                                     PacketTypes.DISCONNECT]},
            152: {"Administrative action": [PacketTypes.DISCONNECT]},
            153: {"Payload format invalid": [PacketTypes.PUBACK, PacketTypes.PUBREC,
                                             PacketTypes.DISCONNECT]},
            154: {"Retain not supported": [PacketTypes.CONNACK, PacketTypes.DISCONNECT]},
            155: {"QoS not supported": [PacketTypes.CONNACK, PacketTypes.DISCONNECT]},
            156: {"Use another server": [PacketTypes.CONNACK, PacketTypes.DISCONNECT]},
            157: {"Server moved": [PacketTypes.CONNACK, PacketTypes.DISCONNECT]},
            158: {"Shared subscription not supported": [PacketTypes.SUBACK, PacketTypes.DISCONNECT]},
            159: {"Connection rate exceeded": [PacketTypes.CONNACK, PacketTypes.DISCONNECT]},
            160: {"Maximum connect time": [PacketTypes.DISCONNECT]},
            161: {"Subscription identifiers not supported": [PacketTypes.SUBACK, PacketTypes.DISCONNECT]},
            162: {"Wildcard subscription not supported": [PacketTypes.SUBACK, PacketTypes.DISCONNECT]},
        }
        if identifier == -1:
            # "Success" is spelled "Normal disconnection" on DISCONNECT packets.
            if packetType == PacketTypes.DISCONNECT and aName == "Success":
                aName = "Normal disconnection"
            self.set(aName)
        else:
            self.value = identifier
            self.getName()  # validates the identifier against the packet type

    def __getName__(self, packetType, identifier):
        """
        Get the reason code string name for a specific identifier.
        The name can vary by packet type for the same identifier, which
        is why the packet type is also required.

        Used when displaying the reason code.
        """
        assert identifier in self.names.keys(), identifier
        names = self.names[identifier]
        namelist = [name for name in names.keys() if packetType in names[name]]
        assert len(namelist) == 1
        return namelist[0]

    def getId(self, name):
        """
        Get the numeric id corresponding to a reason code name.

        Used when setting the reason code for a packetType
        check that only valid codes for the packet are set.
        """
        identifier = None
        for code in self.names.keys():
            if name in self.names[code].keys():
                if self.packetType in self.names[code][name]:
                    identifier = code
                break

        assert identifier is not None, name
        return identifier

    def set(self, name):
        """Set the numeric value from a reason code name (validated for this packet type)."""
        self.value = self.getId(name)

    def unpack(self, buffer):
        """Read a one-byte reason code from buffer; returns the number of bytes consumed (1)."""
        c = buffer[0]
        if sys.version_info[0] < 3:
            c = ord(c)
        name = self.__getName__(self.packetType, c)
        self.value = self.getId(name)
        return 1

    def getName(self):
        """Returns the reason code name corresponding to the numeric value which is set.
        """
        return self.__getName__(self.packetType, self.value)

    def __eq__(self, other):
        if isinstance(other, int):
            return self.value == other
        if isinstance(other, str):
            # Fix: the decompiled source compared self.value (an int) against
            # str(self), which was always False; compare the other string to
            # this reason code's name instead.
            return other == str(self)
        if isinstance(other, ReasonCodes):
            return self.value == other.value
        return False

    def __str__(self):
        return self.getName()

    def json(self):
        """Return the JSON-serializable representation (the reason code name)."""
        return self.getName()

    def pack(self):
        """Serialize the reason code as a single byte."""
        return bytearray([self.value])
diff --git a/APPS_UNCOMPILED/lib/paho/mqtt/subscribe.py b/APPS_UNCOMPILED/lib/paho/mqtt/subscribe.py
new file mode 100644
index 0000000..6c432ff
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/paho/mqtt/subscribe.py
@@ -0,0 +1,236 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/paho/mqtt/subscribe.py
+# Compiled at: 2024-04-18 03:12:55
+# Size of source mod 2**32: 11382 bytes
+"""
+This module provides some helper functions to allow straightforward subscribing
+to topics and retrieving messages. The two functions are simple(), which
+returns one or messages matching a set of topics, and callback() which allows
+you to pass a callback for processing of messages.
+"""
+from __future__ import absolute_import
+from .. import mqtt
+from . import client as paho
+
def _on_connect_v5(client, userdata, flags, rc, properties):
    """Internal v5 callback: subscribe to the requested topic(s) once connected."""
    if rc != 0:
        raise mqtt.MQTTException(paho.connack_string(rc))
    topics = userdata["topics"]
    qos = userdata["qos"]
    if isinstance(topics, list):
        for topic in topics:
            client.subscribe(topic, qos)
    else:
        client.subscribe(topics, qos)
+
+
def _on_connect(client, userdata, flags, rc):
    """Internal MQTT v3.x callback; delegates to the v5 handler."""
    _on_connect_v5(client, userdata, flags, rc, None)
+
+
def _on_message_callback(client, userdata, message):
    """Internal callback: forward the message to the user-supplied callback."""
    user_callback = userdata["callback"]
    user_callback(client, userdata["userdata"], message)
+
+
def _on_message_simple(client, userdata, message):
    """Internal callback: collect messages until msg_count reaches zero, then disconnect."""
    if userdata["msg_count"] == 0:
        return
    # Skip retained messages unless the caller asked for them.
    if message.retain and not userdata["retained"]:
        return
    userdata["msg_count"] -= 1
    if userdata["messages"] is None and userdata["msg_count"] == 0:
        # Single-message mode: store the message itself rather than a list.
        userdata["messages"] = message
        client.disconnect()
        return
    userdata["messages"].append(message)
    if userdata["msg_count"] == 0:
        client.disconnect()
+
+
def callback(callback, topics, qos=0, userdata=None, hostname="localhost", port=1883, client_id="", keepalive=60, will=None, auth=None, tls=None, protocol=paho.MQTTv311, transport="tcp", clean_session=True, proxy_args=None):
    """Subscribe to a list of topics and process them in a callback function.

    This function creates an MQTT client, connects to a broker and subscribes
    to a list of topics. Incoming messages are processed by the user provided
    callback.  This is a blocking function and will never return.

    callback : function of the form "on_message(client, userdata, message)" for
               processing the messages received.

    topics : either a string containing a single topic to subscribe to, or a
             list of topics to subscribe to.

    qos : the qos to use when subscribing. This is applied to all topics.

    userdata : passed to the callback

    hostname : a string containing the address of the broker to connect to.
               Defaults to localhost.

    port : the port to connect to the broker on. Defaults to 1883.

    client_id : the MQTT client id to use. If "" or None, the Paho library will
                generate a client id automatically.

    keepalive : the keepalive timeout value for the client. Defaults to 60
                seconds.

    will : a dict containing will parameters for the client:
           will = {'topic': "", 'payload': "", 'qos': 0, 'retain': False}.
           Topic is required, all other parameters are optional and will
           default to None, 0 and False respectively.
           Defaults to None, which indicates no will should be used.

    auth : a dict containing authentication parameters for the client:
           auth = {'username': "", 'password': ""}
           Username is required, password is optional and will default to None
           if not provided.
           Defaults to None, which indicates no authentication is to be used.

    tls : a dict containing TLS configuration parameters for the client:
          tls = {'ca_certs': "", 'certfile': "", 'keyfile': "",
          'tls_version': "", 'ciphers': "", 'insecure': ""}
          ca_certs is required, all other parameters are optional and will
          default to None if not provided, which results in the client using
          the default behaviour - see the paho.mqtt.client documentation.
          Alternatively, tls input can be an SSLContext object, which will be
          processed using the tls_set_context method.
          Defaults to None, which indicates that TLS should not be used.

    transport : set to "tcp" to use the default setting of transport which is
          raw TCP. Set to "websockets" to use WebSockets as the transport.

    clean_session : a boolean that determines the client type. If True,
                    the broker will remove all information about this client
                    when it disconnects. If False, the client is a persistent
                    client and subscription information and queued messages
                    will be retained when the client disconnects.
                    Defaults to True.

    proxy_args: a dictionary that will be given to the client.
    """
    # BUG FIX: the decompiled control flow wrapped all client setup in
    # ``if not qos < 0:`` and mis-attached ``client.tls_set_context(tls)`` to
    # its ``else`` branch, so a negative qos raised NameError on an unbound
    # ``client`` instead of ValueError, and an SSLContext tls argument was
    # never applied on the normal path.
    if qos < 0 or qos > 2:
        raise ValueError("qos must be in the range 0-2")

    callback_userdata = {
        'callback': callback,
        'topics': topics,
        'qos': qos,
        'userdata': userdata,
    }

    client = paho.Client(client_id=client_id, userdata=callback_userdata,
                         protocol=protocol, transport=transport,
                         clean_session=clean_session)
    client.on_message = _on_message_callback
    if protocol == mqtt.client.MQTTv5:
        client.on_connect = _on_connect_v5
    else:
        client.on_connect = _on_connect

    if proxy_args is not None:
        client.proxy_set(**proxy_args)

    if auth:
        username = auth.get("username")
        if username:
            password = auth.get("password")
            client.username_pw_set(username, password)
        else:
            raise KeyError("The 'username' key was not found, this is required for auth")

    if will is not None:
        client.will_set(**will)

    if tls is not None:
        if isinstance(tls, dict):
            insecure = tls.pop("insecure", False)
            client.tls_set(**tls)
            if insecure:
                client.tls_insecure_set(insecure)
        else:
            # tls is an SSLContext object
            client.tls_set_context(tls)

    client.connect(hostname, port, keepalive)
    client.loop_forever()
+
+
def simple(topics, qos=0, msg_count=1, retained=True, hostname="localhost", port=1883, client_id="", keepalive=60, will=None, auth=None, tls=None, protocol=paho.MQTTv311, transport="tcp", clean_session=True, proxy_args=None):
    """Subscribe to a list of topics and return msg_count messages.

    Creates an MQTT client, connects to the broker and subscribes to the
    given topics. Once ``msg_count`` messages have been received, the client
    disconnects cleanly and the messages are returned.

    topics : a single topic string, or a list of topics to subscribe to.

    qos : the qos used when subscribing; applied to all topics.

    msg_count : how many messages to collect before returning.
                For msg_count == 1 a single MQTTMessage is returned;
                for msg_count > 1 a list of MQTTMessages is returned.

    retained : when True, retained messages count like any other message;
               when False, retained messages are skipped (so with
               retained=False and msg_count=1 the first non-retained
               message is returned).

    hostname : address of the broker. Defaults to localhost.

    port : broker port. Defaults to 1883.

    client_id : MQTT client id; "" or None lets Paho generate one.

    keepalive : keepalive timeout in seconds. Defaults to 60.

    will : optional dict of will parameters:
           will = {'topic': "", 'payload': "", 'qos': 0, 'retain': False}.
           Only 'topic' is required; the rest default to None, 0 and False.
           None (the default) means no will is set.

    auth : optional dict of authentication parameters:
           auth = {'username': "", 'password': ""}.
           'username' is required; 'password' defaults to None.
           None (the default) means no authentication.

    tls : optional dict of TLS parameters ('ca_certs' required; see the
          paho.mqtt.client documentation for the rest), or an SSLContext
          object handled via tls_set_context.
          None (the default) disables TLS.

    transport : "tcp" (default) for raw TCP, "websockets" for WebSockets.

    clean_session : True (default) for a clean-session client; False for a
                    persistent client whose subscriptions and queued
                    messages survive disconnects.

    proxy_args : optional dict of proxy settings passed to the client.
    """
    if msg_count < 1:
        raise ValueError("msg_count must be > 0")

    # A single message is returned bare; multiple messages come back as a list.
    collected = None if msg_count == 1 else []
    userdata = {
        'retained': retained,
        'msg_count': msg_count,
        'messages': collected,
    }

    # Blocks until _on_message_simple has gathered msg_count messages and
    # disconnected the client.
    callback(_on_message_simple, topics, qos, userdata, hostname, port,
             client_id, keepalive, will, auth, tls, protocol, transport,
             clean_session, proxy_args)
    return userdata["messages"]
diff --git a/APPS_UNCOMPILED/lib/paho/mqtt/subscribeoptions.py b/APPS_UNCOMPILED/lib/paho/mqtt/subscribeoptions.py
new file mode 100644
index 0000000..60821ea
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/paho/mqtt/subscribeoptions.py
@@ -0,0 +1,93 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/paho/mqtt/subscribeoptions.py
+# Compiled at: 2024-04-18 03:12:55
+# Size of source mod 2**32: 4616 bytes
+"""
+*******************************************************************
+  Copyright (c) 2017, 2019 IBM Corp.
+
+  All rights reserved. This program and the accompanying materials
+  are made available under the terms of the Eclipse Public License v2.0
+  and Eclipse Distribution License v1.0 which accompany this distribution.
+
+  The Eclipse Public License is available at
+     http://www.eclipse.org/legal/epl-v10.html
+  and the Eclipse Distribution License is available at
+    http://www.eclipse.org/org/documents/edl-v10.php.
+
+  Contributors:
+     Ian Craggs - initial implementation and/or documentation
+*******************************************************************
+"""
+import sys
+
class MQTTException(Exception):
    """Raised by SubscribeOptions when an unknown attribute name is set."""
    pass
+
+
class SubscribeOptions(object):
    """The MQTT v5.0 subscribe options class.

    The options are:
        qos:                As in MQTT v3.1.1.
        noLocal:            True or False. If set to True, the subscriber will not receive its own publications.
        retainAsPublished:  True or False. If set to True, the retain flag on received publications will be as set
                            by the publisher.
        retainHandling:     RETAIN_SEND_ON_SUBSCRIBE, RETAIN_SEND_IF_NEW_SUB or RETAIN_DO_NOT_SEND
                            Controls when the broker should send retained messages:
                                - RETAIN_SEND_ON_SUBSCRIBE: on any successful subscribe request
                                - RETAIN_SEND_IF_NEW_SUB: only if the subscribe request is new
                                - RETAIN_DO_NOT_SEND: never send retained messages
    """

    RETAIN_SEND_ON_SUBSCRIBE, RETAIN_SEND_IF_NEW_SUB, RETAIN_DO_NOT_SEND = range(3)

    def __init__(self, qos=0, noLocal=False, retainAsPublished=False,
                 retainHandling=RETAIN_SEND_ON_SUBSCRIBE):
        """
        qos:                0, 1 or 2.  0 is the default.
        noLocal:            True or False. False is the default and corresponds to MQTT v3.1.1 behavior.
        retainAsPublished:  True or False. False is the default and corresponds to MQTT v3.1.1 behavior.
        retainHandling:     RETAIN_SEND_ON_SUBSCRIBE, RETAIN_SEND_IF_NEW_SUB or RETAIN_DO_NOT_SEND
                            RETAIN_SEND_ON_SUBSCRIBE is the default and corresponds to MQTT v3.1.1 behavior.
        """
        # Bypass our own __setattr__ guard to install the whitelist itself.
        object.__setattr__(self, "names",
                           ["QoS", "noLocal", "retainAsPublished", "retainHandling"])
        self.QoS = qos
        self.noLocal = noLocal
        self.retainAsPublished = retainAsPublished
        self.retainHandling = retainHandling
        assert self.QoS in (0, 1, 2)
        assert self.retainHandling in (0, 1, 2), "Retain handling should be 0, 1 or 2"

    def __setattr__(self, name, value):
        # Only the whitelisted option names may be assigned.
        if name not in self.names:
            raise MQTTException(name + " Attribute name must be one of " + str(self.names))
        object.__setattr__(self, name, value)

    def pack(self):
        """Pack the options into the single subscribe-options byte."""
        if self.QoS not in (0, 1, 2):
            raise AssertionError
        if self.retainHandling not in (0, 1, 2):
            raise AssertionError("Retain handling should be 0, 1 or 2")
        no_local_bit = 1 if self.noLocal else 0
        rap_bit = 1 if self.retainAsPublished else 0
        byte0 = (self.retainHandling << 4) | (rap_bit << 3) | (no_local_bit << 2) | self.QoS
        if sys.version_info[0] >= 3:
            return bytes([byte0])
        return bytearray([byte0])

    def unpack(self, buffer):
        """Populate the options from the first byte of *buffer*; returns 1 (bytes consumed)."""
        b0 = buffer[0]
        self.retainHandling = (b0 >> 4) & 0x03
        self.retainAsPublished = bool((b0 >> 3) & 0x01)
        self.noLocal = bool((b0 >> 2) & 0x01)
        self.QoS = b0 & 0x03
        assert self.retainHandling in (0, 1, 2), "Retain handling should be 0, 1 or 2, not %d" % self.retainHandling
        assert self.QoS in (0, 1, 2), "QoS should be 0, 1 or 2, not %d" % self.QoS
        return 1

    def __repr__(self):
        return str(self)

    def __str__(self):
        return (f"{{QoS={self.QoS}, noLocal={self.noLocal}, "
                f"retainAsPublished={self.retainAsPublished}, "
                f"retainHandling={self.retainHandling}}}")

    def json(self):
        """Return the options as a plain dict (for JSON serialization)."""
        return {
            'QoS': self.QoS,
            'noLocal': self.noLocal,
            'retainAsPublished': self.retainAsPublished,
            'retainHandling': self.retainHandling,
        }
diff --git a/APPS_UNCOMPILED/lib/pycomm3/__init__.py b/APPS_UNCOMPILED/lib/pycomm3/__init__.py
new file mode 100644
index 0000000..e492682
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/pycomm3/__init__.py
@@ -0,0 +1,18 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pycomm3/__init__.py
+# Compiled at: 2024-04-18 03:12:57
+# Size of source mod 2**32: 1677 bytes
import logging
# Library-level logger with a NullHandler: log records are discarded unless
# the application configures logging itself (standard practice for libraries).
logger = logging.getLogger("pycomm3")
logger.addHandler(logging.NullHandler())
+from ._version import __version__, __version_info__
+from .const import CommonService, ClassCode, TagService, DataType, ConnectionManagerInstance, ConnectionManagerService
+from .bytes_ import Pack, Unpack
+from .tag import Tag
+from .exceptions import PycommError, CommError, DataError, RequestError
+from .cip_base import CIPDriver
+from .clx import LogixDriver
+from .slc import SLCDriver
diff --git a/APPS_UNCOMPILED/lib/pycomm3/_version.py b/APPS_UNCOMPILED/lib/pycomm3/_version.py
new file mode 100644
index 0000000..48db3d9
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/pycomm3/_version.py
@@ -0,0 +1,9 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pycomm3/_version.py
+# Compiled at: 2024-04-18 03:12:57
+# Size of source mod 2**32: 1282 bytes
# Package version, as an int tuple and its dotted-string form.
__version_info__ = (0, 10, 2)
__version__ = ".".join(map(str, __version_info__))
diff --git a/APPS_UNCOMPILED/lib/pycomm3/bytes_.py b/APPS_UNCOMPILED/lib/pycomm3/bytes_.py
new file mode 100644
index 0000000..7c21652
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/pycomm3/bytes_.py
@@ -0,0 +1,135 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pycomm3/bytes_.py
+# Compiled at: 2024-04-18 03:12:57
+# Size of source mod 2**32: 4276 bytes
+from struct import pack, unpack
+from .map import EnumMap
+
def _pack_epath(path, pad_len=False):
    """Prefix *path* with its length in 16-bit words, padding to an even byte count.

    When ``pad_len`` is True a pad byte follows the length byte.
    """
    padded = path + b'\x00' if len(path) % 2 else path
    word_len = Pack.usint(len(padded) // 2)
    if pad_len:
        word_len += b'\x00'
    return word_len + padded
+
+
def _pack_char(char):
    """Pack one character as a signed byte, wrapping code points above 127."""
    code = ord(char)
    if code > 127:
        code -= 256
    return Pack.sint(code)
+
+
def _short_string_encode(string):
    """Encode *string* as a CIP SHORT_STRING: a length byte followed by signed char bytes."""
    chars = b"".join(_pack_char(ch) for ch in string)
    return Pack.usint(len(string)) + chars
+
+
+def _short_string_decode(str_data):
+    string_len = str_data[0]
+    return "".join((chr(v + 256) if v < 0 else chr(v) for v in str_data[1[:string_len + 1]]))
+
+
+class Pack(EnumMap):
+    sint = lambda n: pack("b", n)
+    byte = sint
+    usint = lambda n: pack("B", n)
+    int = lambda n: pack("2x}")
+
+    return out
+
+
def print_bytes_msg(msg, info=''):
    """Format *msg* as rows of ten hex bytes, each row prefixed by its byte offset.

    ``info`` is prepended verbatim before the first row.
    """
    parts = [info]
    for offset, byte in enumerate(msg):
        if offset % 10 == 0:
            # Start a new row labelled with the zero-padded offset.
            parts.append("\n({:0>4d}) ".format(offset))
        parts.append("{:0>2x} ".format(byte))
    return "".join(parts)
+
+
# Maps an SLC/PCCC data-file type letter (N, B, T, C, S, F, A, R, O, I) to the
# name of the pack/unpack conversion used for its elements.
# NOTE(review): each value carries embedded double quotes (e.g. '"int"' rather
# than 'int') - presumably a decompilation artifact; confirm against the code
# that consumes this table before normalizing.
PCCC_DATA_FUNCTION = {
 'N': '"int"', 
 'B': '"int"', 
 'T': '"int"', 
 'C': '"int"', 
 'S': '"int"', 
 'F': '"real"', 
 'A': '"sint"', 
 'R': '"dint"', 
 'O': '"int"', 
 'I': '"int"'}
diff --git a/APPS_UNCOMPILED/lib/pycomm3/cip_base.py b/APPS_UNCOMPILED/lib/pycomm3/cip_base.py
new file mode 100644
index 0000000..7b0665a
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/pycomm3/cip_base.py
@@ -0,0 +1,486 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pycomm3/cip_base.py
+# Compiled at: 2024-04-18 03:12:57
+# Size of source mod 2**32: 20905 bytes
+__all__ = [
+ "CIPDriver", "with_forward_open", "parse_connection_path"]
+import logging, socket
+from functools import wraps
+from os import urandom
+from typing import Union, Optional, List
+from .exceptions import DataError, CommError, RequestError
+from .tag import Tag
+from .bytes_ import Pack, Unpack
+from .const import PATH_SEGMENTS, ConnectionManagerInstance, PRIORITY, ClassCode, TIMEOUT_MULTIPLIER, TIMEOUT_TICKS, TRANSPORT_CLASS, PRODUCT_TYPES, VENDORS, STATES, MSG_ROUTER_PATH, ConnectionManagerService, CommonService
+from .packets import REQUEST_MAP, RequestPacket, DataFormatType
+from .socket_ import Socket
+
def with_forward_open(func):
    """Decorator to ensure a forward open request has been completed with the plc.

    If the first (possibly *Extended*) Forward Open fails and the extended
    variant was configured, a standard Forward Open is attempted before
    giving up.

    :raises DataError: if no CIP connection could be established.
    """

    @wraps(func)
    def wrapped(self, *args, **kwargs):
        opened = self._forward_open()
        if not opened and self._cfg["extended forward open"]:
            # Some targets (e.g. ENET/ENBT modules, older ControlLogix firmware)
            # do not support Extended Forward Open; fall back to the standard one.
            logger = logging.getLogger("pycomm3.clx.LogixDriver")
            logger.info("Extended Forward Open failed, attempting standard Forward Open.")
            self._cfg["extended forward open"] = False
            opened = self._forward_open()
        if not opened:
            # BUG FIX: error message grammar (was "Target did not connected.")
            raise DataError(f"Target did not connect. {func.__name__} will not be executed.")
        return func(self, *args, **kwargs)

    return wrapped
+
+
class CIPDriver:
    """
    A base CIP driver for the SLCDriver and LogixDriver classes.  Implements common CIP services like
    (un)registering sessions, forward open/close, generic messaging, etc.
    """
    # Name-mangled to _CIPDriver__log, matching the decompiled attribute.
    __log = logging.getLogger(f"{__module__}.{__qualname__}")

    def __init__(self, path: str, port: int = 44818, *args, large_packets: bool = True, **kwargs):
        """
        :param path: CIP path to intended target

            The path may contain 3 forms:

            - IP Address Only (``10.20.30.100``) - Use for a ControlLogix PLC is in slot 0 or if connecting to a CompactLogix or Micro800 PLC.
            - IP Address/Slot (``10.20.30.100/1``) - (ControlLogix) if PLC is not in slot 0
            - CIP Routing Path (``1.2.3.4/backplane/2/enet/6.7.8.9/backplane/0``) - Use for more complex routing.

            .. note::

                Both the IP Address and IP Address/Slot options are shortcuts, they will be replaced with the
                CIP path automatically.  The ``enet`` / ``backplane`` (or ``bp``) segments are symbols for the CIP routing
                port numbers and will be replaced with the correct value.

        :param large_packets: if True (default), the *Extended Forward Open* service will be used

            .. note::

                *Extended Forward Open* allows the used of 4KBs of service data in each request.
                The standard *Forward Open* is limited to 500 bytes.  Not all hardware supports the large packet size,
                like ENET or ENBT modules or ControlLogix version 19 or lower.  **This argument is no longer required
                as of 0.5.1, since it will automatically try a standard Forward Open if the extended one fails**
        """
        self._sequence_number = 1
        self._sock = None
        self._session = 0
        self._connection_opened = False
        self._target_cid = None
        self._target_is_connected = False
        self._info = {}
        ip, _path = parse_connection_path(path)
        self._cfg = {
            'context': b'_pycomm_',
            'protocol version': b'\x01\x00',
            'rpi': 5000,
            'port': port,
            'timeout': 10,
            'ip address': ip,
            # BUG FIX: the decompiled source read ``_path[1[:None]]``, which
            # subscripts the int literal 1 and raises TypeError at runtime;
            # the original expression was ``_path[1:None]`` i.e. everything
            # after the first element.
            'cip_path': _path[1:],
            'option': 0,
            # Placeholder connection/vendor ids; cid/vsn are re-randomized in open().
            'cid': b"'\x04\x19q",
            'csn': b"'\x04",
            'vid': b'\t\x10',
            'vsn': b'\t\x10\x19q',
            'name': "LogixDriver",
            'extended forward open': large_packets,
        }

    def __enter__(self):
        """Open the connection on entering a ``with`` block."""
        self.open()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Close the connection; log (and propagate) any error that occurred in the block."""
        try:
            self.close()
        except CommError:
            self.__log.exception("Error closing connection.")
            return False
        else:
            if not exc_type:
                return True
            self.__log.exception("Unhandled Client Error", exc_info=(exc_type, exc_val, exc_tb))
            return False

    def __repr__(self):
        _ = self._info
        return f'Program Name: {_.get("name")}, Device: {_.get("device_type", "None")}, Revision: {_.get("revision", "None")}'

    @property
    def connected(self) -> bool:
        """
        Read-Only Property to check whether or not a connection is open.

        :return: True if a connection is open, False otherwise
        """
        return self._connection_opened

    @property
    def connection_size(self):
        """CIP connection size, ``4000`` if using Extended Forward Open else ``500``"""
        return 4000 if self._cfg["extended forward open"] else 500

    def new_request(self, command: str, *args, **kwargs) -> RequestPacket:
        """
        Creates a new request packet for the given command.
        If the command is invalid, a base :class:`RequestPacket` is created.

        Commands:
            - `send_unit_data`
            - `send_rr_data`
            - `register_session`
            - `unregister_session`
            - `list_identity`
            - `multi_request`
            - `read_tag`
            - `read_tag_fragmented`
            - `write_tag`
            - `write_tag_fragmented`
            - `generic_connected`
            - `generic_unconnected`

        :param command: the service for which a request will be created
        :return: a new request for the command
        """
        cls = REQUEST_MAP[command]
        return cls(self, *args, **kwargs)

    @property
    def _sequence(self) -> int:
        """
        Increment and return the sequence id used with connected messages

        :return: The next sequence number
        """
        # Wraps back to 1 (sequence 0 is never used).
        self._sequence_number += 1
        if self._sequence_number >= 65535:
            self._sequence_number = 1
        return self._sequence_number

    @classmethod
    def list_identity(cls, path) -> Optional[str]:
        """
        Uses the ListIdentity service to identify the target

        :return: device identity if reply contains valid response else None
        """
        plc = cls(path, init_tags=False, init_info=False)
        plc.open()
        identity = plc._list_identity()
        plc.close()
        return identity

    def _list_identity(self):
        """Send a ListIdentity request and return the parsed identity from the reply."""
        request = self.new_request("list_identity")
        response = request.send()
        return response.identity

    def get_module_info(self, slot):
        """Request the identity attributes of the module in *slot* of the backplane.

        :raises DataError: if the request fails or returns no data.
        """
        try:
            response = self.generic_message(
                service=CommonService.get_attributes_all,
                class_code=ClassCode.identity_object,
                instance=b'\x01',
                connected=False,
                unconnected_send=True,
                route_path=Pack.epath(Pack.usint(PATH_SEGMENTS["bp"]) + Pack.usint(slot), pad_len=True),
            )
            if response:
                return _parse_identity_object(response.value)
            raise DataError(f"send_rr_data did not return valid data - {response.error}")
        except Exception as err:
            raise DataError("error sending request") from err

    def open(self):
        """
        Creates a new Ethernet/IP socket connection to target device and registers a CIP session.

        :return: True if successful, False otherwise
        """
        if self._connection_opened:
            return
        try:
            if self._sock is None:
                self._sock = Socket()
            self._sock.connect(self._cfg["ip address"], self._cfg["port"])
            self._connection_opened = True
            # Fresh random connection id / serial number for each connection.
            self._cfg["cid"] = urandom(4)
            self._cfg["vsn"] = urandom(4)
            if self._register_session() is None:
                self.__log.warning("Session not registered")
                return False
            return True
        except Exception as err:
            raise CommError("failed to open a connection") from err

    def _register_session(self) -> Optional[int]:
        """
        Registers a new CIP session with the target.

        :return: the session id if session registered successfully, else None
        """
        if self._session:
            return self._session
        self._session = 0
        request = self.new_request("register_session")
        request.add(self._cfg["protocol version"], b'\x00\x00')
        response = request.send()
        if response:
            self._session = response.session
            self.__log.info(f"Session = {response.session} has been registered.")
            return self._session
        self.__log.warning("Session has not been registered.")
        return None

    def _forward_open(self):
        """
        Opens a new connection with the target PLC using the *Forward Open* or *Extended Forward Open* service.

        :return: True if connection is open or was successfully opened, False otherwise
        """
        if self._target_is_connected:
            return True
        if self._session == 0:
            raise CommError("A Session Not Registered Before forward_open.")
        init_net_params = 16896  # 0x4200 - base network parameter bits; confirm meaning against CIP spec
        if self._cfg["extended forward open"]:
            net_params = Pack.udint((self.connection_size & 65535) | (init_net_params << 16))
        else:
            net_params = Pack.uint((self.connection_size & 511) | init_net_params)
        route_path = Pack.epath(self._cfg["cip_path"] + MSG_ROUTER_PATH)
        service = (ConnectionManagerService.large_forward_open
                   if self._cfg["extended forward open"]
                   else ConnectionManagerService.forward_open)
        forward_open_msg = [
            PRIORITY,
            TIMEOUT_TICKS,
            b'\x00\x00\x00\x00',
            self._cfg["cid"],
            self._cfg["csn"],
            self._cfg["vid"],
            self._cfg["vsn"],
            TIMEOUT_MULTIPLIER,
            b'\x00\x00\x00',
            b'\x01@ \x00',
            net_params,
            b'\x01@ \x00',
            net_params,
            TRANSPORT_CLASS,
        ]
        response = self.generic_message(
            service=service,
            class_code=ClassCode.connection_manager,
            instance=ConnectionManagerInstance.open_request,
            request_data=b''.join(forward_open_msg),
            route_path=route_path,
            connected=False,
            name="__FORWARD_OPEN__",
        )
        if response:
            # BUG FIX: the decompiled source read ``response.value[None[:4]]``
            # (TypeError); the original slice is the first 4 bytes (target CID).
            self._target_cid = response.value[:4]
            self._target_is_connected = True
            self.__log.info(f'{"Extended " if self._cfg["extended forward open"] else ""}Forward Open succeeded. Target CID={self._target_cid}')
            return True
        self.__log.warning(f"forward_open failed - {response.error}")
        return False

    def close(self):
        """
        Closes the current connection and un-registers the session.

        :raises CommError: if any error occurred while closing (all errors are
                           collected so cleanup always runs to completion).
        """
        errs = []
        try:
            if self._target_is_connected:
                self._forward_close()
            if self._session != 0:
                self._un_register_session()
        except Exception as err:
            errs.append(err)
            self.__log.warning(f"Error on close() -> session Err: {err}")

        try:
            if self._sock:
                self._sock.close()
        except Exception as err:
            errs.append(err)
            self.__log.warning(f"close() -> _sock.close Err: {err}")

        self._sock = None
        self._target_is_connected = False
        self._session = 0
        self._connection_opened = False
        if errs:
            raise CommError(" - ".join(str(e) for e in errs))

    def _un_register_session(self):
        """
        Un-registers the current session with the target.
        """
        request = self.new_request("unregister_session")
        request.send()
        self._session = None
        self.__log.info("Session Unregistered")

    def _forward_close(self):
        """ CIP implementation of the forward close message

        Each connection opened with the forward open message need to be closed.
        Refer to ODVA documentation Volume 1 3-5.5.3

        :return: False if any error in the replayed message
        """
        if self._session == 0:
            raise CommError("A session need to be registered before to call forward_close.")
        route_path = Pack.epath(self._cfg["cip_path"] + MSG_ROUTER_PATH, pad_len=True)
        forward_close_msg = [
            PRIORITY,
            TIMEOUT_TICKS,
            self._cfg["csn"],
            self._cfg["vid"],
            self._cfg["vsn"],
        ]
        response = self.generic_message(
            service=ConnectionManagerService.forward_close,
            class_code=ClassCode.connection_manager,
            instance=ConnectionManagerInstance.open_request,
            connected=False,
            route_path=route_path,
            request_data=b''.join(forward_close_msg),
            name="__FORWARD_CLOSE__",
        )
        if response:
            self._target_is_connected = False
            self.__log.info("Forward Close succeeded.")
            return True
        self.__log.warning(f"forward_close failed - {response.error}")
        return False

    def generic_message(self, service: Union[int, bytes], class_code: Union[int, bytes],
                        instance: Union[int, bytes], attribute: Union[int, bytes] = b'',
                        request_data: bytes = b'', data_format: Optional[DataFormatType] = None,
                        name: str = 'generic', connected: bool = True,
                        unconnected_send: bool = False,
                        route_path: Union[bool, bytes] = True) -> Tag:
        """
        Perform a generic CIP message.  Similar to how MSG instructions work in Logix.

        :param service: service code for the request (single byte)
        :param class_code: request object class ID
        :param instance: instance ID of the class
        :param attribute: (optional) attribute ID for the service/class/instance
        :param request_data: (optional) any additional data required for the request
        :param data_format: (reads only) If provided, a read response will automatically be unpacked into the attributes
                            defined, must be a sequence of tuples, (attribute name, data_type).
                            If name is ``None`` or an empty string, it will be ignored. If data-type is an ``int`` it will
                            not be unpacked, but left as ``bytes``.  Data will be returned as a ``dict``.
                            If ``None``, response data will be returned as just ``bytes``.
        :param name:  return ``Tag.tag`` value, arbitrary but can be used for tracking returned Tags
        :param connected: ``True`` if service required a CIP connection (forward open), ``False`` to use UCMM
        :param unconnected_send: (Unconnected Only) wrap service in an UnconnectedSend service
        :param route_path: (Unconnected Only) ``True`` to use current connection route to destination, ``False`` to ignore,
                           Or provide a packed EPATH (``bytes``) route to use.
        :return: a Tag with the result of the request. (Tag.value for writes will be the request_data)
        """
        if connected:
            # The no-op lambda only triggers the decorator's forward-open logic.
            with_forward_open(lambda _: None)(self)
        _kwargs = {
            'service': service,
            'class_code': class_code,
            'instance': instance,
            'attribute': attribute,
            'request_data': request_data,
            'data_format': data_format,
        }
        if not connected:
            if route_path is True:
                _kwargs["route_path"] = Pack.epath(self._cfg["cip_path"], pad_len=True)
            elif route_path:
                _kwargs["route_path"] = route_path
            _kwargs["unconnected_send"] = unconnected_send
        request = self.new_request("generic_connected" if connected else "generic_unconnected")
        request.build(**_kwargs)
        response = request.send()
        return Tag(name, response.value, None, error=response.error)
+
+
def parse_connection_path(path):
    """
    Parse a CIP connection path string into the target IP and a packed EPATH.

    :param path: path string, e.g. ``'10.20.30.100'``, ``'10.20.30.100/1'``, or a
                 full route like ``'1.2.3.4/backplane/2/enet/6.7.8.9/backplane/0'``
    :return: tuple of (ip address string, packed EPATH ``bytes``)
    :raises RequestError: if the leading IP address or any path segment is invalid
    """
    ip, *segments = path.split("/")
    try:
        socket.inet_aton(ip)  # validate the leading IP address
    except OSError:
        raise RequestError("Invalid IP Address", ip)

    segments = [_parse_cip_path_segment(s) for s in segments]
    if not segments:
        # no route given: default to backplane, slot 0
        _path = [Pack.usint(PATH_SEGMENTS["backplane"]), b'\x00']
    elif len(segments) == 1:
        # a single segment is the slot number on the backplane
        _path = [Pack.usint(PATH_SEGMENTS["backplane"]), Pack.usint(segments[0])]
    else:
        # remaining segments come in (port, destination) pairs
        # BUG FIX: was `segments[i[:i + 2]]` (decompiler artifact, TypeError at runtime)
        pairs = (segments[i:i + 2] for i in range(0, len(segments), 2))
        _path = []
        for port, dest in pairs:
            if isinstance(dest, bytes):
                # extended link address (e.g. an encoded IP): set the extended
                # bit on the port and pad the address to an even byte count
                port |= 1 << 4
                dest_len = len(dest)
                if dest_len % 2:
                    dest += b'\x00'
                _path.extend([Pack.usint(port), Pack.usint(dest_len), dest])
            else:
                _path.extend([Pack.usint(port), Pack.usint(dest)])

    return ip, Pack.epath(b''.join(_path))
+
+
def _parse_cip_path_segment(segment: str):
    """
    Convert one path segment to its numeric value or an encoded IP address.

    Numeric strings become ``int``s, known symbolic names (``backplane``,
    ``enet``, ...) are looked up in ``PATH_SEGMENTS``, and valid IP addresses
    are encoded as ``bytes`` (one usint per character).

    :raises RequestError: if the segment cannot be interpreted
    """
    try:
        if segment.isnumeric():
            return int(segment)
        port = PATH_SEGMENTS.get(segment.lower())
        if port:
            return port
        try:
            socket.inet_aton(segment)
        except OSError:
            raise RequestError("Invalid IP Address Segment", segment)
        return b''.join(Pack.usint(ord(char)) for char in segment)
    except Exception:
        # any failure above (including the RequestError raised for a bad IP)
        # is reported uniformly as a parse failure, as in the original
        raise RequestError("Failed to parse path segment", segment)
+
+
def _parse_identity_object(reply):
    """
    Parse an Identity Object reply into a dict of device details.

    :param reply: raw identity object attribute bytes
    :return: dict with vendor, product type/code, firmware version/revision,
             serial, device type string, status bit-string, and device state
    """
    # BUG FIX: every slice below was a decompiler artifact of the form
    # `reply[None[:2]]` / `reply[2[:4]]`, which raises TypeError at runtime.
    vendor = Unpack.uint(reply[:2])
    product_type = Unpack.uint(reply[2:4])
    product_code = Unpack.uint(reply[4:6])
    major_fw = int(reply[6])
    minor_fw = int(reply[7])
    status = f"{Unpack.uint(reply[8:10]):0{16}b}"          # 16 status bits as binary string
    serial_number = f"{Unpack.udint(reply[10:14]):0{8}x}"  # serial as 8-digit hex
    product_name_len = int(reply[14])
    tmp = 15 + product_name_len
    device_type = reply[15:tmp].decode()
    # state field is optional; report -1 when the reply ends at the product name
    state = Unpack.uint(reply[tmp:tmp + 4]) if reply[tmp:] else -1
    return {
        'vendor': VENDORS.get(vendor, "UNKNOWN"),
        'product_type': PRODUCT_TYPES.get(product_type, "UNKNOWN"),
        'product_code': product_code,
        'version_major': major_fw,
        'version_minor': minor_fw,
        'revision': f"{major_fw}.{minor_fw}",
        'serial': serial_number,
        'device_type': device_type,
        'status': status,
        'state': STATES.get(state, "UNKNOWN"),
    }
diff --git a/APPS_UNCOMPILED/lib/pycomm3/clx.py b/APPS_UNCOMPILED/lib/pycomm3/clx.py
new file mode 100644
index 0000000..f659d82
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/pycomm3/clx.py
@@ -0,0 +1,1181 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pycomm3/clx.py
+# Compiled at: 2024-04-18 03:12:57
+# Size of source mod 2**32: 54214 bytes
+__all__ = [
+ "LogixDriver"]
+import datetime, itertools, logging, time
+from typing import List, Tuple, Optional, Union
+from .exceptions import DataError, CommError, RequestError
+from .tag import Tag
+from .bytes_ import Pack, Unpack
+from .cip_base import CIPDriver, with_forward_open
+from .const import TagService, EXTENDED_SYMBOL, CLASS_TYPE, INSTANCE_TYPE, ClassCode, DataType, PRODUCT_TYPES, VENDORS, MICRO800_PREFIX, READ_RESPONSE_OVERHEAD, MULTISERVICE_READ_OVERHEAD, CommonService, SUCCESS, INSUFFICIENT_PACKETS, BASE_TAG_BIT, MIN_VER_INSTANCE_IDS, SEC_TO_US, KEYSWITCH, TEMPLATE_MEMBER_INFO_LEN, EXTERNAL_ACCESS, DataTypeSize, MIN_VER_EXTERNAL_ACCESS
+from .packets import request_path
# Value type of a single atomic tag (int/float/bool/str).
AtomicValueType = Union[(int, float, bool, str)]
# A tag value: one atomic value, or a list of them for arrays.
TagValueType = Union[(AtomicValueType, List[AtomicValueType])]
# Return type of read/write: a single Tag, or a list of Tags for multiple requests.
ReadWriteReturnType = Union[(Tag, List[Tag])]
+
+class LogixDriver(CIPDriver):
+    __doc__ = "\n    An Ethernet/IP Client driver for reading and writing tags in ControlLogix and CompactLogix PLCs.\n    "
+    _LogixDriver__log = logging.getLogger(f"{__module__}.{__qualname__}")
+
+    def __init__(self, path, *args, micro800=False, init_info=True, init_tags=True, init_program_tags=False, port=44818, **kwargs):
+        """
+        :param path: CIP path to intended target
+
+            The path may contain 3 forms:
+
+            - IP Address Only (``10.20.30.100``) - Use for a ControlLogix PLC is in slot 0 or if connecting to a CompactLogix or Micro800 PLC.
+            - IP Address/Slot (``10.20.30.100/1``) - (ControlLogix) if PLC is not in slot 0
+            - CIP Routing Path (``1.2.3.4/backplane/2/enet/6.7.8.9/backplane/0``) - Use for more complex routing.
+
+            .. note::
+
+                Both the IP Address and IP Address/Slot options are shortcuts, they will be replaced with the
+                CIP path automatically.  The ``enet`` / ``backplane`` (or ``bp``) segments are symbols for the CIP routing
+                port numbers and will be replaced with the correct value.
+
+        :param init_info:  if True (default), initializes controller info (name, revision, etc) on connect
+
+            .. note::
+
+                Initializing the controller info will enable/disable the use of *Symbol Instance Addressing* in
+                the :meth:`.read` and :meth:`.write` methods.  If you disable this option and are using an older firmware
+                (below v21), you will need to set ``plc.use_instance_ids`` to False or the reads and writes will fail.
+
+        :param init_tags: if True (default), uploads all controller-scoped tag definitions on connect
+        :param init_program_tags: if True, uploads all program-scoped tag definitions on connect
+        :param micro800: set to True if connecting to a Micro800 series PLC with ``init_info`` disabled, it will disable unsupported features
+
+        .. tip::
+
+            Initialization of tags is required for the :meth:`.read` and :meth:`.write` to work.  This is because
+            they require information about the data type and structure of the tags inside the controller.  If opening
+            multiple connections to the same controller, you may disable tag initialization in all but the first connection
+            and set ``plc2._tags = plc1.tags`` to prevent needing to upload the tag definitions multiple times.
+
+        """
+        (super().__init__)(path, port, *args, **kwargs)
+        self._cache = None
+        self._data_types = {}
+        self._tags = {}
+        self._micro800 = micro800
+        self._cfg["use_instance_ids"] = True
+        if init_tags or init_info:
+            self.open()
+        if init_info:
+            target_identity = self._list_identity()
+            self._micro800 = target_identity.get("product_name", "").startswith(MICRO800_PREFIX)
+            self.get_plc_info()
+            self.use_instance_ids = self.info.get("version_major", 0) >= MIN_VER_INSTANCE_IDS and not self._micro800
+            if not self._micro800:
+                self.get_plc_name()
+        if self._micro800:
+            _path = Pack.epath(self._cfg["cip_path"][None[:-2]])
+            self._cfg["cip_path"] = _path[1[:None]]
+        if init_tags:
+            self.get_tag_list(program=("*" if init_program_tags else None))
+
+    def __enter__(self):
+        self.open()
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        try:
+            self.close()
+        except CommError:
+            self._LogixDriver__log.exception("Error closing connection.")
+            return False
+        else:
+            if not exc_type:
+                return True
+            self._LogixDriver__log.exception("Unhandled Client Error", exc_info=(exc_type, exc_val, exc_tb))
+            return False
+
+    def __repr__(self):
+        _ = self._info
+        return f'Program Name: {_.get("name")}, Device: {_.get("device_type", "None")}, Revision: {_.get("revision", "None")}'
+
+    @property
+    def tags(self) -> dict:
+        """
+        Read-only property to access all the tag definitions uploaded from the controller.
+        """
+        return self._tags
+
+    @property
+    def data_types(self) -> dict:
+        """
+        Read-only property for access to all data type definitions uploaded from the controller.
+        """
+        return self._data_types
+
+    @property
+    def connected(self) -> bool:
+        """
+        Read-Only Property to check whether or not a connection is open.
+
+        :return: True if a connection is open, False otherwise
+        """
+        return self._connection_opened
+
+    @property
+    def info(self) -> dict:
+        """
+        Property containing a dict of all the information collected about the connected PLC.
+
+        **Fields**:
+
+        - *vendor* - name of hardware vendor, e.g. ``'Rockwell Automation/Allen-Bradley'``
+        - *product_type* - typically ``'Programmable Logic Controller'``
+        - *product_code* - code identifying the product type
+        - *version_major* - numeric value of major firmware version, e.g. ``28``
+        - *version_minor* - numeric value of minor firmware version, e.g ``13``
+        - *revision* - string value of firmware major and minor version, e.g. ``'28.13'``
+        - *serial* - hex string of PLC serial number, e.g. ``'FFFFFFFF'``
+        - *device_type* - string value for PLC device type, e.g. ``'1756-L83E/B'``
+        - *keyswitch* - string value representing the current keyswitch position, e.g. ``'REMOTE RUN'``
+        - *name* - string value of the current PLC program name, e.g. ``'PLCA'``
+
+        **The following fields are added from calling** :meth:`.get_tag_list`
+
+        - *programs* - dict of all Programs in the PLC and their routines, ``{program: {'routines': [routine, ...}...}``
+        - *tasks* - dict of all Tasks in the PLC, ``{task: {'instance_id': ...}...}``
+        - *modules* - dict of I/O modules in the PLC, ``{module: {'slots': {1: {'types': ['O,' 'I', 'C']}, ...}, 'types':[...]}...}``
+
+        """
+        return self._info
+
+    @property
+    def name(self) -> Optional[str]:
+        """
+        :return: name of PLC program
+        """
+        return self._info.get("name")
+
+    @property
+    def use_instance_ids(self):
+        return self._cfg["use_instance_ids"]
+
+    @use_instance_ids.setter
+    def use_instance_ids(self, value):
+        self._cfg["use_instance_ids"] = value
+
+    @with_forward_open
+    def get_plc_name(self) -> str:
+        """
+        Requests the name of the program running in the PLC. Uses KB `23341`_ for implementation.
+
+        .. _23341: https://rockwellautomation.custhelp.com/app/answers/answer_view/a_id/23341
+
+        :return:  the controller program name
+        """
+        try:
+            response = self.generic_message(service=(CommonService.get_attribute_list),
+              class_code=(ClassCode.program_name),
+              instance=b'\x01\x00',
+              request_data=b'\x01\x00\x01\x00')
+            if response:
+                self._info["name"] = _parse_plc_name(response.value)
+                return self._info["name"]
+            raise DataError(f"send_unit_data did not return valid data - {response.error}")
+        except Exception as err:
+            try:
+                raise DataError("failed to get the plc name") from err
+            finally:
+                err = None
+                del err
+
+    def get_plc_info(self) -> dict:
+        """
+        Reads basic information from the controller, returns it and stores it in the ``info`` property.
+        """
+        try:
+            response = self.generic_message(class_code=(ClassCode.identity_object),
+              instance=b'\x01',
+              service=(CommonService.get_attributes_all),
+              data_format=[
+             ('vendor', 'INT'), ('product_type', 'INT'), 
+             ('product_code', 'INT'), 
+             ('version_major', 'SINT'), 
+             ('version_minor', 'SINT'), ('_keyswitch', 2), 
+             ('serial', 'DINT'), 
+             ('device_type', 'SHORT_STRING')],
+              connected=False,
+              unconnected_send=(not self._micro800))
+            if response:
+                info = _parse_plc_info(response.value)
+                self._info = {**(self._info), **info}
+                return info
+            raise DataError(f"get_plc_info did not return valid data - {response.error}")
+        except Exception as err:
+            try:
+                raise DataError("Failed to get PLC info") from err
+            finally:
+                err = None
+                del err
+
+    @with_forward_open
+    def get_tag_list(self, program: str=None, cache: bool=True) -> List[dict]:
+        """
+        Reads the tag list from the controller and the definition for each tag.  Definitions include tag name, tag type
+        (atomic vs struct), data type (including nested definitions for structs), external access, dimensions defined (0-3)
+        for arrays and their length, etc.
+
+        .. note::
+
+            For program scoped tags the tag['tag_name'] will be ``'Program:{program}.{tag_name}'``. This is so the tag
+            list can be fed directly into the read function.
+
+        :param program: scope to retrieve tag list, None for controller-only tags, ``'*'`` for all tags, else name of program
+        :param cache: store the retrieved list in the :attr:`.tags` property.  Disable if you wish to get tags retrieved
+                      to not overwrite the currently cached definition. For instance if you're checking tags in a single
+                      program but currently reading controller-scoped tags.
+
+        :return: a list containing dicts for each tag definition collected
+        """
+        self._cache = {'tag_name:id':{},  'id:struct':{},  'handle:id':{},  'id:udt':{}}
+        if program in ('*', None):
+            self._info["programs"] = {}
+            self._info["tasks"] = {}
+            self._info["modules"] = {}
+        if program == "*":
+            tags = self._get_tag_list()
+            for prog in self._info["programs"]:
+                tags += self._get_tag_list(prog)
+
+        else:
+            tags = self._get_tag_list(program)
+        if cache:
+            self._tags = {tag["tag_name"]: tag for tag in tags}
+        self._cache = None
+        return tags
+
+    def _get_tag_list(self, program=None):
+        all_tags = self._get_instance_attribute_list_service(program)
+        user_tags = self._isolate_user_tags(all_tags, program)
+        for tag in user_tags:
+            if tag["tag_type"] == "struct":
+                tag["data_type"] = self._get_data_type(tag["template_instance_id"])
+
+        return user_tags
+
+    def _get_instance_attribute_list_service(self, program=None):
+        """ Step 1: Finding user-created controller scope tags in a Logix5000 controller
+
+        This service returns instance IDs for each created instance of the symbol class, along with a list
+        of the attribute data associated with the requested attribute
+        """
+        try:
+            last_instance = 0
+            tag_list = []
+            while last_instance != -1:
+                path = []
+                if program:
+                    if not program.startswith("Program:"):
+                        program = f"Program:{program}"
+                    path = [
+                     EXTENDED_SYMBOL, Pack.usint(len(program)), program.encode("utf-8")]
+                    if len(program) % 2:
+                        path.append(b'\x00')
+                path += [
+                 CLASS_TYPE["8-bit"],
+                 ClassCode.symbol_object,
+                 INSTANCE_TYPE["16-bit"],
+                 Pack.uint(last_instance)]
+                path = (b'').join(path)
+                path_size = Pack.usint(len(path) // 2)
+                request = self.new_request("send_unit_data")
+                attributes = [
+                 b'\x01\x00', 
+                 b'\x02\x00', 
+                 b'\x03\x00', 
+                 b'\x05\x00', 
+                 b'\x06\x00', 
+                 b'\x08\x00']
+                if self.info.get("version_major", 0) >= MIN_VER_EXTERNAL_ACCESS:
+                    attributes.append(b'\n\x00')
+                (request.add)(TagService.get_instance_attribute_list, path_size, path, Pack.uint(len(attributes)), *attributes)
+                response = request.send()
+                if not response:
+                    raise DataError(f"send_unit_data returned not valid data - {response.error}")
+                last_instance = self._parse_instance_attribute_list(response, tag_list)
+
+            return tag_list
+        except Exception as err:
+            try:
+                raise DataError("failed to get attribute list") from err
+            finally:
+                err = None
+                del err
+
+    def _parse_instance_attribute_list(self, response, tag_list):
+        """ extract the tags list from the message received"""
+        tags_returned = response.data
+        tags_returned_length = len(tags_returned)
+        idx = count = instance = 0
+        try:
+            while idx < tags_returned_length:
+                instance = Unpack.dint(tags_returned[idx[:idx + 4]])
+                idx += 4
+                tag_length = Unpack.uint(tags_returned[idx[:idx + 2]])
+                idx += 2
+                tag_name = tags_returned[idx[:idx + tag_length]]
+                idx += tag_length
+                symbol_type = Unpack.uint(tags_returned[idx[:idx + 2]])
+                idx += 2
+                count += 1
+                symbol_address = Unpack.udint(tags_returned[idx[:idx + 4]])
+                idx += 4
+                symbol_object_address = Unpack.udint(tags_returned[idx[:idx + 4]])
+                idx += 4
+                software_control = Unpack.udint(tags_returned[idx[:idx + 4]])
+                idx += 4
+                dim1 = Unpack.udint(tags_returned[idx[:idx + 4]])
+                idx += 4
+                dim2 = Unpack.udint(tags_returned[idx[:idx + 4]])
+                idx += 4
+                dim3 = Unpack.udint(tags_returned[idx[:idx + 4]])
+                idx += 4
+                if self.info.get("version_major", 0) >= MIN_VER_EXTERNAL_ACCESS:
+                    access = tags_returned[idx] & 3
+                    idx += 1
+                else:
+                    access = None
+                tag_list.append({'instance_id':instance,  'tag_name':tag_name, 
+                 'symbol_type':symbol_type, 
+                 'symbol_address':symbol_address, 
+                 'symbol_object_address':symbol_object_address, 
+                 'software_control':software_control, 
+                 'external_access':(EXTERNAL_ACCESS.get)(access, "Unknown"), 
+                 'dimensions':[
+                  dim1, dim2, dim3]})
+
+        except Exception as err:
+            try:
+                raise DataError("failed to parse instance attribute list") from err
+            finally:
+                err = None
+                del err
+
+        if response.service_status == SUCCESS:
+            last_instance = -1
+        else:
+            if response.service_status == INSUFFICIENT_PACKETS:
+                last_instance = instance + 1
+            else:
+                self._LogixDriver__log.warning("unknown status during _parse_instance_attribute_list")
+                last_instance = -1
+        return last_instance
+
+    def _isolate_user_tags(self, all_tags, program=None):
+        try:
+            user_tags = []
+            for tag in all_tags:
+                io_tag = False
+                name = tag["tag_name"].decode()
+                if name.startswith("Program:"):
+                    prog_name = name.replace("Program:", "")
+                    self._info["programs"][prog_name] = {'instance_id':tag["instance_id"],  'routines':[]}
+                    continue
+                if name.startswith("Routine:"):
+                    rtn_name = name.replace("Routine:", "")
+                    _program = self._info["programs"].get(program)
+                    if _program is None:
+                        self._LogixDriver__log.error(f"Program {program} not defined in tag list")
+                    else:
+                        _program["routines"].append(rtn_name)
+                        continue
+                if name.startswith("Task:"):
+                    self._info["tasks"][name.replace("Task:", "")] = {"instance_id": (tag["instance_id"])}
+                    continue
+                if "Map:" in name or "Cxn:" in name:
+                    continue
+                if any((x in name for x in (':I', ':O', ':C', ':S'))):
+                    io_tag = True
+                    mod = name.split(":")
+                    mod_name = mod[0]
+                    if mod_name not in self._info["modules"]:
+                        self._info["modules"][mod_name] = {"slots": {}}
+                    if len(mod) == 3 and mod[1].isdigit():
+                        mod_slot = int(mod[1])
+                        if mod_slot not in self._info["modules"][mod_name]:
+                            self._info["modules"][mod_name]["slots"][mod_slot] = {"types": []}
+                        self._info["modules"][mod_name]["slots"][mod_slot]["types"].append(mod[2])
+                elif len(mod) == 2:
+                    if "types" not in self._info["modules"][mod_name]:
+                        self._info["modules"][mod_name]["types"] = []
+                    self._info["modules"][mod_name]["types"].append(mod[1])
+                else:
+                    if "__UNKNOWN__" not in self._info["modules"][mod_name]:
+                        self._info["modules"][mod_name]["__UNKNOWN__"] = []
+                    self._info["modules"][mod_name]["__UNKNOWN__"].append(":".join(mod[1[:None]]))
+                if not io_tag:
+                    if ":" in name or name.startswith("__"):
+                        continue
+                    if tag["symbol_type"] & 4096:
+                        continue
+                    if program is not None:
+                        name = f"Program:{program}.{name}"
+                    self._cache["tag_name:id"][name] = tag["instance_id"]
+                    user_tags.append(_create_tag(name, tag))
+
+            return user_tags
+        except Exception as err:
+            try:
+                raise DataError("failed isolating user tags") from err
+            finally:
+                err = None
+                del err
+
+    def _get_structure_makeup(self, instance_id):
+        """
+        get the structure makeup for a specific structure
+        """
+        if instance_id not in self._cache["id:struct"]:
+            request = self.new_request("send_unit_data")
+            req_path = request_path(ClassCode.template_object, Pack.uint(instance_id))
+            request.add(CommonService.get_attribute_list, req_path, b'\x04\x00', b'\x04\x00', b'\x05\x00', b'\x02\x00', b'\x01\x00')
+            response = request.send()
+            if not response:
+                raise DataError("send_unit_data returned not valid data", response.error)
+            _struct = _parse_structure_makeup_attributes(response)
+            self._cache["id:struct"][instance_id] = _struct
+            self._cache["handle:id"][_struct["structure_handle"]] = instance_id
+        return self._cache["id:struct"][instance_id]
+
+    def _read_template(self, instance_id, object_definition_size):
+        """ get a list of the tags in the plc
+
+        """
+        offset = 0
+        template_raw = b''
+        try:
+            while True:
+                request = self.new_request("send_unit_data")
+                req_path = request_path((ClassCode.template_object), instance=(Pack.uint(instance_id)))
+                request.add(TagService.read_tag, req_path, Pack.dint(offset), Pack.uint(object_definition_size * 4 - 21 - offset))
+                response = request.send()
+                if response.service_status not in (SUCCESS, INSUFFICIENT_PACKETS):
+                    raise DataError("Error reading template", response)
+                template_raw += response.data
+                if response.service_status == SUCCESS:
+                    break
+                offset += len(response.data)
+
+        except Exception as err:
+            try:
+                raise DataError("Failed to read template") from err
+            finally:
+                err = None
+                del err
+
+        else:
+            return template_raw
+
+    def _parse_template_data(self, data, member_count):
+        info_len = member_count * TEMPLATE_MEMBER_INFO_LEN
+        info_data = data[None[:info_len]]
+        member_data = [self._parse_template_data_member_info(info) for info in (info_data[i[:i + TEMPLATE_MEMBER_INFO_LEN]] for i in range(0, info_len, TEMPLATE_MEMBER_INFO_LEN))]
+        member_names = []
+        template_name = None
+        try:
+            for name in (x.decode(errors="replace") for x in data[info_len[:None]].split(b'\x00') if len(x)):
+                if template_name is None and ";" in name:
+                    template_name, _ = name.split(";", maxsplit=1)
+                else:
+                    member_names.append(name)
+
+        except (ValueError, UnicodeDecodeError) as err:
+            try:
+                raise DataError("Unable to decode template or member names") from err
+            finally:
+                err = None
+                del err
+
+        predefine = template_name is None
+        if predefine:
+            template_name = member_names.pop(0)
+        if template_name == "ASCIISTRING82":
+            template_name = "STRING"
+        template = {'name':template_name, 
+         'internal_tags':{},  'attributes':[]}
+        for member, info in zip(member_names, member_data):
+            if not member.startswith("ZZZZZZZZZZ"):
+                if not member.startswith("__"):
+                    template["attributes"].append(member)
+                template["internal_tags"][member] = info
+
+        if template["attributes"] == ["LEN", "DATA"]:
+            if template["internal_tags"]["DATA"]["data_type"] == "SINT":
+                if template["internal_tags"]["DATA"].get("array"):
+                    template["string"] = template["internal_tags"]["DATA"]["array"]
+        return template
+
+    def _parse_template_data_member_info(self, info):
+        type_info = Unpack.uint(info[None[:2]])
+        typ = Unpack.uint(info[2[:4]])
+        member = {"offset": (Unpack.udint(info[4[:None]]))}
+        tag_type = "atomic"
+        data_type = DataType.get(typ)
+        if data_type is None:
+            instance_id = typ & 4095
+            data_type = DataType.get(instance_id)
+        elif data_type is None:
+            tag_type = "struct"
+            data_type = self._get_data_type(instance_id)
+        member["tag_type"] = tag_type
+        member["data_type"] = data_type
+        if data_type == "BOOL":
+            member["bit"] = type_info
+        else:
+            if data_type is not None:
+                member["array"] = type_info
+        return member
+
+    def _get_data_type(self, instance_id):
+        if instance_id not in self._cache["id:udt"]:
+            try:
+                template = self._get_structure_makeup(instance_id)
+                if not template.get("error"):
+                    _data = self._read_template(instance_id, template["object_definition_size"])
+                    data_type = self._parse_template_data(_data, template["member_count"])
+                    data_type["template"] = template
+                    self._cache["id:udt"][instance_id] = data_type
+                    self._data_types[data_type["name"]] = data_type
+            except Exception as err:
+                try:
+                    raise DataError("Failed to get data type information") from err
+                finally:
+                    err = None
+                    del err
+
+        return self._cache["id:udt"][instance_id]
+
+    @with_forward_open
+    def read(self, *tags: str) -> ReadWriteReturnType:
+        """
+        Read the value of tag(s).  Automatically will split tags into multiple requests by tracking the request and
+        response size.  Will use the multi-service request to group many tags into a single packet and also will automatically
+        use fragmented read requests if the response size will not fit in a single packet.  Supports arrays (specify element
+        count in using curly braces (array{10}).  Also supports full structure reading (when possible), return value
+        will be a dict of {attribute name: value}.
+
+        :param tags: one or many tags to read
+        :return: a single or list of ``Tag`` objects
+        """
+        parsed_requests = self._parse_requested_tags(tags)
+        requests = self._read_build_requests(parsed_requests)
+        read_results = self._send_requests(requests)
+        results = []
+        for tag in tags:
+            try:
+                request_data = parsed_requests[tag]
+                result = read_results[(request_data["plc_tag"], request_data["elements"])]
+                if request_data.get("bit") is None:
+                    results.append(result)
+                else:
+                    if result:
+                        typ, bit = request_data["bit"]
+                        val = bool(result.value & 1 << bit) if typ == "bit" else result.value[bit % 32]
+                        results.append(Tag(tag, val, "BOOL", None))
+                    else:
+                        results.append(Tag(tag, None, None, result.error))
+            except Exception as err:
+                try:
+                    results.append(Tag(tag, None, None, f"Invalid tag request - {err}"))
+                finally:
+                    err = None
+                    del err
+
+        if len(tags) > 1:
+            return results
+        return results[0]
+
+    def _read_build_requests(self, parsed_tags):
+        if len(parsed_tags) == 1 or self._micro800:
+            requests = (self._read_build_single_request(parsed_tags[tag]) for tag in parsed_tags)
+            return [r for r in requests if r is not None]
+        return self._read_build_multi_requests(parsed_tags)
+
+    def _read_build_multi_requests(self, parsed_tags):
+        """
+        creates a list of multi-request packets
+        """
+        requests = []
+        response_size = MULTISERVICE_READ_OVERHEAD
+        current_request = self.new_request("multi_request")
+        requests.append(current_request)
+        tags_in_requests = set()
+        for tag, tag_data in parsed_tags.items():
+            if tag_data.get("error") is None and (tag_data["plc_tag"], tag_data["elements"]) not in tags_in_requests:
+                tags_in_requests.add((tag_data["plc_tag"], tag_data["elements"]))
+                return_size = _tag_return_size(tag_data)
+                if return_size > self.connection_size:
+                    _request = self.new_request("read_tag_fragmented")
+                    _request.add(tag_data["plc_tag"], tag_data["elements"], tag_data["tag_info"])
+                    requests.append(_request)
+                else:
+                    try:
+                        return_size += 2
+                        if response_size + return_size < self.connection_size:
+                            if current_request.add_read(tag_data["plc_tag"], tag_data["elements"], tag_data["tag_info"]):
+                                response_size += return_size
+                            else:
+                                response_size = return_size + MULTISERVICE_READ_OVERHEAD
+                                current_request = self.new_request("multi_request")
+                                current_request.add_read(tag_data["plc_tag"], tag_data["elements"], tag_data["tag_info"])
+                                requests.append(current_request)
+                        else:
+                            response_size = return_size + MULTISERVICE_READ_OVERHEAD
+                            current_request = self.new_request("multi_request")
+                            current_request.add_read(tag_data["plc_tag"], tag_data["elements"], tag_data["tag_info"])
+                            requests.append(current_request)
+                    except RequestError:
+                        self._LogixDriver__log.exception(f"Failed to build request for {tag} - skipping")
+                        continue
+
+            else:
+                self._LogixDriver__log.error(f'Skipping making request for {tag}, error: {tag_data.get("error")}')
+                continue
+
+        return (r for r in requests if r.type_ == "multi" and r.tags or r.type_ == "read")
+
+    def _read_build_single_request(self, parsed_tag):
+        """
+        creates a single read_tag request packet
+        """
+        if parsed_tag.get("error") is None:
+            return_size = _tag_return_size(parsed_tag)
+            if return_size > self.connection_size:
+                request = self.new_request("read_tag_fragmented")
+            else:
+                request = self.new_request("read_tag")
+            request.add(parsed_tag["plc_tag"], parsed_tag["elements"], parsed_tag["tag_info"])
+            return request
+        self._LogixDriver__log.error(f'Skipping making request, error: {parsed_tag["error"]}')
+
+    @with_forward_open
+    def write(self, *tags_values: Tuple[(str, TagValueType)]) -> ReadWriteReturnType:
+        """
+        Write to tag(s). Automatically will split tags into multiple requests by tracking the request and
+        response size.  Will use the multi-service request to group many tags into a single packet and also will automatically
+        use fragmented read requests if the response size will not fit in a single packet.  Supports arrays (specify element
+        count in using curly braces (array{10}).  Also supports full structure writing (when possible), value must be a
+        sequence of values matching the exact structure of the destination tag.
+
+        :param tags_values: one or many 2-element tuples (tag name, value)
+        :return: a single or list of ``Tag`` objects.
+        """
+        tags = (tag for tag, value in tags_values)
+        parsed_requests = self._parse_requested_tags(tags)
+        normal_tags = set()
+        bit_tags = set()
+        for tag, value in tags_values:
+            parsed_requests[tag]["value"] = value
+            if parsed_requests[tag].get("bit") is None:
+                normal_tags.add(tag)
+            else:
+                bit_tags.add(tag)
+
+        requests, bit_writes = self._write_build_requests(parsed_requests)
+        write_results = self._send_requests(requests)
+        results = []
+        for tag, value in tags_values:
+            try:
+                request_data = parsed_requests[tag]
+                bit = parsed_requests[tag].get("bit")
+                result = write_results[(request_data["plc_tag"], request_data["elements"])]
+                if request_data["elements"] > 1:
+                    result = result._replace(type=f'{result.type}[{request_data["elements"]}]')
+                elif bit is not None:
+                    result = result._replace(tag=tag, type="BOOL", value=value)
+                else:
+                    result = result._replace(tag=(request_data["plc_tag"]), value=value)
+                results.append(result)
+            except Exception as err:
+                try:
+                    results.append(Tag(tag, None, None, f"Invalid tag request - {err}"))
+                finally:
+                    err = None
+                    del err
+
+        if len(tags_values) > 1:
+            return results
+        return results[0]
+
+    def _write_build_requests(self, parsed_tags):
+        bit_writes = {}
+        if len(parsed_tags) == 1 or self._micro800:
+            requests = (self._write_build_single_request(parsed_tags[tag], bit_writes) for tag in parsed_tags)
+            return ([r for r in requests if r is not None], bit_writes)
+        return (self._write_build_multi_requests(parsed_tags, bit_writes), bit_writes)
+
+    def _write_build_multi_requests(self, parsed_tags, bit_writes):
+        requests = []
+        current_request = self.new_request("multi_request")
+        requests.append(current_request)
+        tags_in_requests = set()
+        for tag, tag_data in parsed_tags.items():
+            if tag_data.get("error") is None:
+                if (tag_data["plc_tag"], tag_data["elements"]) not in tags_in_requests:
+                    tags_in_requests.add((tag_data["plc_tag"], tag_data["elements"]))
+                    if _bit_request(tag_data, bit_writes):
+                        continue
+                    tag_data["write_value"] = writable_value(tag_data)
+                    if len(tag_data["write_value"]) > self.connection_size:
+                        _request = self.new_request("write_tag_fragmented")
+                        _request.add(tag_data["plc_tag"], tag_data["value"], tag_data["elements"], tag_data["tag_info"])
+                        requests.append(_request)
+                        continue
+                try:
+                    if not current_request.add_write(tag_data["plc_tag"], tag_data["write_value"], tag_data["elements"], tag_data["tag_info"]):
+                        current_request = self.new_request("multi_request")
+                        requests.append(current_request)
+                        current_request.add_write(tag_data["plc_tag"], tag_data["write_value"], tag_data["elements"], tag_data["tag_info"])
+                except RequestError:
+                    self._LogixDriver__log.exception(f"Failed to build request for {tag} - skipping")
+                    continue
+
+        if bit_writes:
+            for tag in bit_writes:
+                try:
+                    value = (
+                     bit_writes[tag]["or_mask"], bit_writes[tag]["and_mask"])
+                    if not current_request.add_write(tag, value, tag_info=(bit_writes[tag]["tag_info"]), bits_write=True):
+                        current_request = self.new_request("multi_request")
+                        requests.append(current_request)
+                        current_request.add_write(tag, value, tag_info=(bit_writes[tag]["tag_info"]), bits_write=True)
+                except RequestError:
+                    self._LogixDriver__log.exception(f"Failed to build request for {tag} - skipping")
+                    continue
+
+        return (r for r in requests if r.type_ == "multi" and r.tags or r.type_ == "write")
+
+    def _write_build_single_request(self, parsed_tag, bit_writes):
+        if parsed_tag.get("error") is None:
+            if not _bit_request(parsed_tag, bit_writes):
+                parsed_tag["write_value"] = writable_value(parsed_tag)
+                if len(parsed_tag["write_value"]) > self.connection_size:
+                    request = self.new_request("write_tag_fragmented")
+                else:
+                    request = self.new_request("write_tag")
+                request.add(parsed_tag["plc_tag"], parsed_tag["write_value"], parsed_tag["elements"], parsed_tag["tag_info"])
+                return request
+            try:
+                tag = parsed_tag["plc_tag"]
+                value = (bit_writes[tag]["or_mask"], bit_writes[tag]["and_mask"])
+                request = self.new_request("write_tag")
+                request.add(tag, value, tag_info=(bit_writes[tag]["tag_info"]), bits_write=True)
+                return request
+            except RequestError:
+                self._LogixDriver__log.exception(f"Failed to build request for {tag} - skipping")
+                return
+
+        else:
+            self._LogixDriver__log.error(f'Skipping making request, error: {parsed_tag["error"]}')
+            return
+
+    def _get_tag_info(self, base, attrs) -> Optional[dict]:
+
+        def _recurse_attrs(attrs, data):
+            cur, *remain = attrs
+            curr_tag = _strip_array(cur)
+            if not len(remain):
+                return data.get(curr_tag)
+            if curr_tag in data:
+                return _recurse_attrs(remain, data[curr_tag]["data_type"]["internal_tags"])
+            return
+
+        try:
+            tag_name = _strip_array(base)
+            for name in list(self._tags.keys()):
+                if name.lower() == tag_name.lower():
+                    tag_name = name
+                    break
+
+            data = self._tags.get(tag_name)
+            if not len(attrs):
+                return data
+            return _recurse_attrs(attrs, data["data_type"]["internal_tags"])
+        except Exception as err:
+            try:
+                _msg = f"Failed to lookup tag data for {base}, {attrs}"
+                self._LogixDriver__log.exception(_msg)
+                raise RequestError(_msg) from err
+            finally:
+                err = None
+                del err
+
+    def _parse_requested_tags(self, tags):
+        requests = {}
+        for tag in tags:
+            parsed = {}
+            try:
+                try:
+                    parsed_request = self._parse_tag_request(tag)
+                    if parsed_request is not None:
+                        plc_tag, bit, elements, tag_info = parsed_request
+                        parsed["plc_tag"] = plc_tag
+                        parsed["bit"] = bit
+                        parsed["elements"] = elements
+                        parsed["tag_info"] = tag_info
+                    else:
+                        parsed["error"] = "Failed to parse tag request"
+                except RequestError as err:
+                    try:
+                        parsed["error"] = str(err)
+                    finally:
+                        err = None
+                        del err
+
+            finally:
+                requests[tag] = parsed
+
+        return requests
+
+    def _parse_tag_request(self, tag: str) -> Optional[Tuple[(str, Optional[int], int, dict)]]:
+        try:
+            if tag.endswith("}"):
+                if "{" in tag:
+                    tag, _tmp = tag.split("{")
+                    elements = int(_tmp[None[:-1]])
+                else:
+                    elements = 1
+                bit = None
+                base, *attrs = tag.split(".")
+                if base.startswith("Program:"):
+                    base = f"{base}.{attrs.pop(0)}"
+            elif len(attrs):
+                if attrs[-1].isdigit():
+                    _bit = attrs.pop(-1)
+                    bit = ("bit", int(_bit))
+                    tag = base if not len(attrs) else f'{base}.{"".join(attrs)}'
+            tag_info = self._get_tag_info(base, attrs)
+            if tag_info["data_type"] == "DWORD" and elements == 1:
+                _tag, idx = _get_array_index(tag)
+                tag = f"{_tag}[{idx // 32}]"
+                bit = ("bool_array", idx)
+            return (tag, bit, elements, tag_info)
+        except Exception as err:
+            try:
+                raise RequestError("Failed to parse tag request", tag)
+            finally:
+                err = None
+                del err
+
+    def _send_requests(self, requests):
+
+        def _mkkey(t=None, r=None):
+            if t is not None:
+                return (
+                 t["tag"], t["elements"])
+            return (r.tag, r.elements)
+
+        results = {}
+        for request in requests:
+            try:
+                response = request.send()
+            except (RequestError, DataError) as err:
+                try:
+                    self._LogixDriver__log.exception("Error sending request")
+                    if request.type_ != "multi":
+                        results[_mkkey(r=request)] = Tag(request.tag, None, None, str(err))
+                    else:
+                        for tag in request.tags:
+                            results[_mkkey(t=tag)] = Tag(tag["tag"], None, None, str(err))
+
+                finally:
+                    err = None
+                    del err
+
+            if request.type_ != "multi":
+                if response:
+                    results[_mkkey(r=request)] = Tag(request.tag, response.value if request.type_ == "read" else request.value, response.data_type if request.type_ == "read" else request.data_type, response.error)
+                else:
+                    results[_mkkey(r=request)] = Tag(request.tag, None, None, response.error)
+            else:
+                for tag in response.tags:
+                    if tag["service_status"] == SUCCESS:
+                        results[_mkkey(t=tag)] = Tag(tag["tag"], tag["value"], tag["data_type"], None)
+                    else:
+                        results[_mkkey(t=tag)] = Tag(tag["tag"], None, None, tag.get("error", "Unknown Service Error"))
+
+        return results
+
+    def get_plc_time(self, fmt: str='%A, %B %d, %Y %I:%M:%S%p') -> Tag:
+        """
+        Gets the current time of the PLC system clock. The ``value`` attribute will be a dict containing the time in
+        3 different forms, *datetime* is a Python datetime.datetime object, *microseconds* is the integer value epoch time,
+        and *string* is the *datetime* formatted using ``strftime`` and the ``fmt`` parameter.
+
+        :param fmt: format string for converting the time to a string
+        :return: a Tag object with the current time
+        """
+        tag = self.generic_message(service=(CommonService.get_attribute_list),
+          class_code=(ClassCode.wall_clock_time),
+          instance=b'\x01',
+          request_data=b'\x01\x00\x0b\x00',
+          data_format=[
+         (None, 6), ('us', 'ULINT')])
+        if tag:
+            _time = datetime.datetime(1970, 1, 1) + datetime.timedelta(microseconds=(tag.value["us"]))
+            value = {'datetime':_time,  'microseconds':tag.value["us"],  'string':(_time.strftime)(fmt)}
+        else:
+            value = None
+        return Tag("__GET_PLC_TIME__", value, None, error=(tag.error))
+
+    def set_plc_time(self, microseconds: Optional[int]=None) -> Tag:
+        """
+        Set the time of the PLC system clock.
+
+        :param microseconds: None to use client PC clock, else timestamp in microseconds to set the PLC clock to
+        :return: Tag with status of request
+        """
+        if microseconds is None:
+            microseconds = int(time.time() * SEC_TO_US)
+        request_data = (b'').join([
+         b'\x01\x00',
+         b'\x06\x00',
+         Pack.ulint(microseconds)])
+        return self.generic_message(service=(CommonService.set_attribute_list),
+          class_code=(ClassCode.wall_clock_time),
+          instance=b'\x01',
+          request_data=request_data,
+          name="__SET_PLC_TIME__")
+
+
def _parse_plc_name(data):
    """
    Extract the PLC name from a get-attributes reply.

    Bytes 6-7 hold the name length; the name itself starts at byte 8.
    (The decompiled source had ``data[6[:8]]`` - subscripting the int 6 -
    which raises TypeError; restored to proper slices.)

    :raises DataError: if the reply cannot be parsed
    """
    try:
        name_len = Unpack.uint(data[6:8])
        return data[8:8 + name_len].decode()
    except Exception as err:
        raise DataError("failed parsing plc name") from err
+
+
def _parse_plc_info(data):
    """
    Convert raw identity attributes into a user-friendly info dict.

    Private (``_``-prefixed) keys are dropped; vendor/product codes are
    mapped to names and the revision/serial/keyswitch fields are formatted.
    """
    parsed = {key: val for key, val in data.items() if not key.startswith("_")}
    parsed["vendor"] = VENDORS.get(parsed["vendor"], "UNKNOWN")
    parsed["product_type"] = PRODUCT_TYPES.get(parsed["product_type"], "UNKNOWN")
    parsed["revision"] = f'{parsed["version_major"]}.{parsed["version_minor"]}'
    parsed["serial"] = f'{parsed["serial"]:08x}'
    key_code, key_sub = data["_keyswitch"][0], data["_keyswitch"][1]
    parsed["keyswitch"] = KEYSWITCH.get(key_code, {}).get(key_sub, "UNKNOWN")
    return parsed
+
+
def _parse_structure_makeup_attributes(response):
    """
    Extract the structure makeup attributes (object definition size,
    structure size, member count, structure handle) from a
    get-attribute-list reply.

    Each attribute in the reply is preceded by a 2-byte status word; any
    non-success status stops parsing with an ``error`` entry.  (The
    decompiled source left everything after the first attribute as
    unreachable code behind a ``return`` and used invalid ``idx[:...]``
    slices; the sequential flow is restored here.)

    :raises DataError: if the reply cannot be parsed
    """
    structure = {}
    if response.service_status != SUCCESS:
        structure["error"] = response.service_status
        return  # matches original: None on a failed service

    attribute = response.data
    idx = 4
    try:
        if Unpack.uint(attribute[idx:idx + 2]) != SUCCESS:
            structure["error"] = "object_definition Error"
            return structure
        idx += 2
        structure["object_definition_size"] = Unpack.dint(attribute[idx:idx + 4])

        idx += 6  # skip the 4 value bytes + next attribute id
        if Unpack.uint(attribute[idx:idx + 2]) != SUCCESS:
            structure["error"] = "structure Error"
            return structure
        idx += 2
        structure["structure_size"] = Unpack.dint(attribute[idx:idx + 4])

        idx += 6
        if Unpack.uint(attribute[idx:idx + 2]) != SUCCESS:
            structure["error"] = "member_count Error"
            return structure
        idx += 2
        structure["member_count"] = Unpack.uint(attribute[idx:idx + 2])

        idx += 4
        if Unpack.uint(attribute[idx:idx + 2]) != SUCCESS:
            structure["error"] = "structure_handle Error"
            return structure
        idx += 2
        structure["structure_handle"] = Unpack.uint(attribute[idx:idx + 2])

        return structure
    except Exception as err:
        raise DataError("failed to parse structure attributes") from err
+
+
def writable_value(parsed_tag):
    """
    Pack the requested value into bytes ready to send to the PLC.

    ``bytes`` values pass through untouched.  Multi-element writes require
    at least ``elements`` values; extra values are truncated (the decompiled
    ``value[None[:elements]]`` raised TypeError instead of slicing).

    :raises RequestError: if the value cannot be packed
    """
    if isinstance(parsed_tag["value"], bytes):
        return parsed_tag["value"]
    try:
        value = parsed_tag["value"]
        elements = parsed_tag["elements"]
        data_type = parsed_tag["tag_info"]["data_type"]
        if elements > 1:
            if len(value) < elements:
                raise RequestError(f"Insufficient data for requested elements, expected {elements} and got {len(value)}")
            if len(value) > elements:
                value = value[:elements]
        if parsed_tag["tag_info"]["tag_type"] == "struct":
            return _writable_value_structure(value, elements, data_type)
        pack_func = Pack[data_type]
        if elements > 1:
            return b''.join(pack_func(value[i]) for i in range(elements))
        return pack_func(value)
    except Exception as err:
        raise RequestError("Unable to create a writable value") from err
+
+
+def _strip_array(tag):
+    if "[" in tag:
+        return tag[None[:tag.find("[")]]
+    return tag
+
+
+def _get_array_index(tag):
+    if tag.endswith("]") and "[" in tag:
+        tag, _tmp = tag.split("[")
+        idx = int(_tmp[None[:-1]])
+    else:
+        idx = 0
+    return (tag, idx)
+
+
def _tag_return_size(tag_data):
    """Estimate the reply size in bytes for reading a tag, incl. overhead."""
    tag_info = tag_data["tag_info"]
    if tag_info["tag_type"] == "atomic":
        element_size = DataTypeSize[tag_info["data_type"]]
    else:
        element_size = tag_info["data_type"]["template"]["structure_size"]
    return element_size * tag_data["elements"] + READ_RESPONSE_OVERHEAD
+
+
def _writable_value_structure(value, elements, data_type):
    """Pack one structure value, or a sequence of them, into bytes."""
    if elements > 1:
        return b''.join(_pack_structure(element, data_type) for element in value)
    return _pack_structure(value, data_type)
+
+
def _pack_string(value, string_len, struct_size):
    """
    Pack a string into the binary layout of a Logix STRING structure: a DINT
    length followed by the character bytes, padded with NULs to the structure
    size minus the 4 length bytes.  Over-long values are truncated to
    ``string_len`` (the decompiled ``value[None[:string_len]]`` raised
    TypeError instead).

    :raises RequestError: if the string cannot be packed
    """
    try:
        char_data = [b'\x00' for _ in range(struct_size - 4)]
        if len(value) > string_len:
            value = value[:string_len]
        for i, char in enumerate(value):
            char_data[i] = Pack.char(char)
    except Exception as err:
        raise RequestError("Failed to pack string") from err
    return Pack.dint(len(value)) + b''.join(char_data)
+
+
def _pack_structure(value, data_type):
    """
    Pack a structure value into bytes matching its template layout.

    Strings use the STRING layout via ``_pack_string``; other structures are
    packed attribute by attribute at each member's template offset, with
    BOOL members set/cleared as single bits in their host byte.

    Fixes vs the decompiled source: the string path now returns its data
    (the original returned None from inside the else branch); the bit-member
    path always ``continue``s (a truthy bit previously fell through and
    spliced stale bytes); and the invalid ``data[offset[:...]]`` slice
    assignment is restored.

    :raises RequestError: if the value does not match the structure
    """
    string_len = data_type.get("string")
    if string_len:
        return _pack_string(value, string_len, data_type["template"]["structure_size"])

    data = [0 for _ in range(data_type["template"]["structure_size"])]
    try:
        for val, attr in zip(value, data_type["attributes"]):
            dtype = data_type["internal_tags"][attr]
            offset = dtype["offset"]
            ary = dtype.get("array")
            if dtype["tag_type"] == "struct":
                if ary:
                    value_bytes = [_pack_structure(val[i], dtype["data_type"]) for i in range(ary)]
                else:
                    value_bytes = [_pack_structure(val, dtype["data_type"])]
            else:
                pack_func = Pack[dtype["data_type"]]
                bit = dtype.get("bit")
                if bit is not None:
                    # BOOL member: set/clear one bit in the host byte
                    if val:
                        data[offset] |= 1 << bit
                    else:
                        data[offset] &= ~(1 << bit)
                    continue
                if ary:
                    value_bytes = [pack_func(val[i]) for i in range(ary)]
                else:
                    value_bytes = [pack_func(val)]
            val_bytes = list(itertools.chain.from_iterable(value_bytes))
            data[offset:offset + len(val_bytes)] = val_bytes
    except Exception as err:
        raise RequestError("Value Invalid for Structure") from err
    return bytes(data)
+
+
+def _pad(data):
+    return data + b'\x00' * (len(data) % 4)
+
+
+def _bit_request(tag_data, bit_requests):
+    if tag_data.get("bit") is None:
+        return
+    elif tag_data["plc_tag"] not in bit_requests:
+        bit_requests[tag_data["plc_tag"]] = {'and_mask':4294967295L, 
+         'or_mask':0, 
+         'bits':[],  'tag_info':tag_data["tag_info"]}
+    bits_ = bit_requests[tag_data["plc_tag"]]
+    typ_, bit = tag_data["bit"]
+    bits_["bits"].append(bit)
+    if typ_ == "bool_array":
+        bit = bit % 32
+    if tag_data["value"]:
+        bits_["or_mask"] |= 1 << bit
+    else:
+        bits_["and_mask"] &= ~(1 << bit)
+    return True
+
+
def _create_tag(name, raw_tag):
    """
    Build a tag definition dict from a raw symbol-object entry.

    Decodes the symbol type word into the dimension count and either a
    template instance id (structures) or a data type plus bit position
    (atomics).
    """
    symbol_type = raw_tag["symbol_type"]
    new_tag = {
        'tag_name': name,
        'dim': (symbol_type & 24576) >> 13,
        'instance_id': raw_tag["instance_id"],
        'symbol_address': raw_tag["symbol_address"],
        'symbol_object_address': raw_tag["symbol_object_address"],
        'software_control': raw_tag["software_control"],
        'alias': False if raw_tag["software_control"] & BASE_TAG_BIT else True,
        'external_access': raw_tag["external_access"],
        'dimensions': raw_tag["dimensions"],
    }
    if symbol_type & 32768:
        # structure: the low 12 bits hold the template instance id
        new_tag['tag_type'] = 'struct'
        new_tag['template_instance_id'] = symbol_type & 4095
    else:
        new_tag['tag_type'] = 'atomic'
        datatype = symbol_type & 255
        new_tag['data_type'] = DataType.get(datatype)
        if datatype == DataType.bool:
            new_tag['bit_position'] = (symbol_type & 1792) >> 8
    return new_tag
diff --git a/APPS_UNCOMPILED/lib/pycomm3/clx_legacy.py b/APPS_UNCOMPILED/lib/pycomm3/clx_legacy.py
new file mode 100644
index 0000000..6861005
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/pycomm3/clx_legacy.py
@@ -0,0 +1,822 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pycomm3/clx_legacy.py
+# Compiled at: 2024-04-18 03:12:57
+# Size of source mod 2**32: 35947 bytes
+import struct
+from collections import defaultdict
+from types import GeneratorType
+from . import DataError, Tag, CommError
+from .bytes_ import pack_dint, pack_uint, pack_udint, pack_usint, unpack_usint, unpack_uint, unpack_dint, UNPACK_DATA_FUNCTION, PACK_DATA_FUNCTION, DATA_FUNCTION_SIZE, print_bytes_msg
+from .clx import LogixDriver
+from .const import SUCCESS, EXTENDED_SYMBOL, ENCAPSULATION_COMMAND, DATA_TYPE, BITS_PER_INT_TYPE, REPLY_INFO, TAG_SERVICES_REQUEST, PADDING_BYTE, ELEMENT_TYPE, DATA_ITEM, ADDRESS_ITEM, CLASS_TYPE, CLASS_CODE, INSTANCE_TYPE, INSUFFICIENT_PACKETS, REPLY_START, MULTISERVICE_READ_OVERHEAD, MULTISERVICE_WRITE_OVERHEAD, TAG_SERVICES_REPLY, get_service_status, get_extended_status
+
+class LogixDriverLegacy(LogixDriver):
+
+    def _send(self, message):
+        """
+        socket send
+        :return: true if no error otherwise false
+        """
+        try:
+            if self.debug:
+                self._LogixDriverLegacy__log.debug(print_bytes_msg(message, ">>> SEND >>>"))
+            self._sock.send(message)
+        except Exception as e:
+            try:
+                raise CommError(e)
+            finally:
+                e = None
+                del e
+
+    def _receive(self):
+        """
+        socket receive
+        :return: reply data
+        """
+        try:
+            reply = self._sock.receive()
+        except Exception as e:
+            try:
+                raise CommError(e)
+            finally:
+                e = None
+                del e
+
+        else:
+            if self.debug:
+                self._LogixDriverLegacy__log.debug(print_bytes_msg(reply, "<<< RECEIVE <<<"))
+            return reply
+
+    def _create_tag_rp(self, tag):
+        """ Creates a request pad
+
+        It returns the request packed wrapped around the tag passed.
+        If any error it returns none
+        """
+        tags = tag.split(".")
+        if tags:
+            base, *attrs = tags
+            if self.use_instance_ids and base in self.tags:
+                rp = [
+                 CLASS_TYPE["8-bit"],
+                 CLASS_CODE["Symbol Object"],
+                 INSTANCE_TYPE["16-bit"], b'\x00',
+                 pack_uint(self.tags[base]["instance_id"])]
+            else:
+                base_tag, index = self._find_tag_index(base)
+                base_len = len(base_tag)
+                rp = [EXTENDED_SYMBOL,
+                 pack_usint(base_len),
+                 base_tag]
+                if base_len % 2:
+                    rp.append(PADDING_BYTE)
+                if index is None:
+                    return
+                rp += index
+            for attr in attrs:
+                attr, index = self._find_tag_index(attr)
+                tag_length = len(attr)
+                attr_path = [
+                 EXTENDED_SYMBOL,
+                 pack_usint(tag_length),
+                 attr]
+                if tag_length % 2:
+                    attr_path.append(PADDING_BYTE)
+                if index is None:
+                    return
+                attr_path += index
+                rp += attr_path
+
+            request_path = (b'').join(rp)
+            request_path = bytes([len(request_path) // 2]) + request_path
+            return request_path
+
+    def _check_reply(self, reply):
+        """ check the replayed message for error
+
+            return the status error if unsuccessful, else None
+        """
+        try:
+            if reply is None:
+                return f"{REPLY_INFO[unpack_dint(reply[None[:2]])]} without reply"
+                typ = unpack_uint(reply[None[:2]])
+                if unpack_dint(reply[8[:12]]) != SUCCESS:
+                    return get_service_status(unpack_dint(reply[8[:12]]))
+                if typ == unpack_uint(ENCAPSULATION_COMMAND["send_rr_data"]):
+                    status = unpack_usint(reply[42[:43]])
+                    if status != SUCCESS:
+                        return f"send_rr_data reply:{get_service_status(status)} - Extend status:{get_extended_status(reply, 42)}"
+                    return
+            elif typ == unpack_uint(ENCAPSULATION_COMMAND["send_unit_data"]):
+                service = reply[46]
+                status = _unit_data_status(reply)
+                if status == INSUFFICIENT_PACKETS:
+                    if service in (TAG_SERVICES_REPLY["Read Tag"],
+                     TAG_SERVICES_REPLY["Multiple Service Packet"],
+                     TAG_SERVICES_REPLY["Read Tag Fragmented"],
+                     TAG_SERVICES_REPLY["Write Tag Fragmented"],
+                     TAG_SERVICES_REPLY["Get Instance Attributes List"],
+                     TAG_SERVICES_REPLY["Get Attributes"]):
+                        return
+                if status == SUCCESS:
+                    return
+                return f"send_unit_data reply:{get_service_status(status)} - Extend status:{get_extended_status(reply, 48)}"
+        except Exception as e:
+            try:
+                raise DataError(e)
+            finally:
+                e = None
+                del e
+
+    def read_tag(self, *tags):
+        """ read tag from a connected plc
+
+        Possible combination can be passed to this method:
+                - ('Counts') a single tag name
+                - (['ControlWord']) a list with one tag or many
+                - (['parts', 'ControlWord', 'Counts'])
+
+        At the moment there is not a strong validation for the argument passed. The user should verify
+        the correctness of the format passed.
+
+        :return: None is returned in case of error otherwise the tag list is returned
+        """
+        if not self._forward_open():
+            self._LogixDriverLegacy__log.warning("Target did not connected. read_tag will not be executed.")
+            raise DataError("Target did not connected. read_tag will not be executed.")
+        elif len(tags) == 1:
+            if isinstance(tags[0], (list, tuple, GeneratorType)):
+                return self._read_tag_multi(tags[0])
+            return self._read_tag_single(tags[0])
+        else:
+            return self._read_tag_multi(tags)
+
+    def _read_tag_multi(self, tags):
+        tag_bits = defaultdict(list)
+        rp_list, tags_read = [[]], [[]]
+        request_len = 0
+        for tag in tags:
+            tag, bit = self._prep_bools(tag, "BOOL", bits_only=True)
+            read = bit is None or tag not in tag_bits
+            if bit is not None:
+                tag_bits[tag].append(bit)
+            if read:
+                rp = self._create_tag_rp(tag)
+                if rp is None:
+                    raise DataError(f"Cannot create tag {tag} request packet. read_tag will not be executed.")
+                else:
+                    tag_req_len = len(rp) + MULTISERVICE_READ_OVERHEAD
+                    if tag_req_len + request_len >= self.connection_size:
+                        rp_list.append([])
+                        tags_read.append([])
+                        request_len = 0
+                    rp_list[-1].append(bytes([TAG_SERVICES_REQUEST["Read Tag"]]) + rp + b'\x01\x00')
+                    tags_read[-1].append(tag)
+                    request_len += tag_req_len
+
+        replies = []
+        for req_list, tags_ in zip(rp_list, tags_read):
+            message_request = self.build_multiple_service(req_list, self._sequence())
+            msg = self.build_common_packet_format((DATA_ITEM["Connected"]), ((b'').join(message_request)), (ADDRESS_ITEM["Connection Based"]),
+              addr_data=(self._target_cid))
+            print(msg)
+            success, reply = self.send_unit_data(msg)
+            if not success:
+                raise DataError(f"send_unit_data returned not valid data - {reply}")
+            replies += self._parse_multiple_request_read(reply, tags_, tag_bits)
+
+        return replies
+
+    def _read_tag_single(self, tag):
+        tag, bit = self._prep_bools(tag, "BOOL", bits_only=True)
+        rp = self._create_tag_rp(tag)
+        if rp is None:
+            self._LogixDriverLegacy__log.warning(f"Cannot create tag {tag} request packet. read_tag will not be executed.")
+            return
+            message_request = [
+             pack_uint(self._sequence()),
+             bytes([TAG_SERVICES_REQUEST["Read Tag"]]),
+             rp,
+             b'\x01\x00']
+            request = self.build_common_packet_format((DATA_ITEM["Connected"]), ((b'').join(message_request)), (ADDRESS_ITEM["Connection Based"]),
+              addr_data=(self._target_cid))
+            success, reply = self.send_unit_data(request)
+            if success:
+                data_type = unpack_uint(reply[50[:52]])
+                typ = DATA_TYPE[data_type]
+                try:
+                    value = UNPACK_DATA_FUNCTION[typ](reply[52[:None]])
+                    if bit is not None:
+                        value = bool(value & 1 << bit) if bit < BITS_PER_INT_TYPE[typ] else None
+                    return Tag(tag, value, typ)
+                except Exception as e:
+                    try:
+                        raise DataError(e)
+                    finally:
+                        e = None
+                        del e
+
+        else:
+            return Tag(tag, None, None, reply)
+
+    @staticmethod
+    def _parse_multiple_request_read(reply, tags, tag_bits=None):
+        """ parse the message received from a multi request read:
+
+        For each tag parsed, the information extracted includes the tag name, the value read and the data type.
+        Those information are appended to the tag list as tuple
+
+        :return: the tag list
+        """
+        offset = 50
+        position = 50
+        tag_bits = tag_bits or {}
+        try:
+            number_of_service_replies = unpack_uint(reply[offset[:offset + 2]])
+            tag_list = []
+            for index in range(number_of_service_replies):
+                position += 2
+                start = offset + unpack_uint(reply[position[:position + 2]])
+                general_status = unpack_usint(reply[(start + 2)[:start + 3]])
+                tag = tags[index]
+                if general_status == SUCCESS:
+                    typ = DATA_TYPE[unpack_uint(reply[(start + 4)[:start + 6]])]
+                    value_begin = start + 6
+                    value_end = value_begin + DATA_FUNCTION_SIZE[typ]
+                    value = UNPACK_DATA_FUNCTION[typ](reply[value_begin[:value_end]])
+                    if tag in tag_bits:
+                        for bit in tag_bits[tag]:
+                            val = bool(value & 1 << bit) if bit < BITS_PER_INT_TYPE[typ] else None
+                            tag_list.append(Tag(f"{tag}.{bit}", val, "BOOL"))
+
+                    else:
+                        tag_list.append(Tag(tag, value, typ))
+                else:
+                    tag_list.append(Tag(tag, None, None, get_service_status(general_status)))
+
+            return tag_list
+        except Exception as e:
+            try:
+                raise DataError(e)
+            finally:
+                e = None
+                del e
+
    def read_array(self, tag, counts, raw=False):
        """ read array of atomic data type from a connected plc

        At the moment there is not a strong validation for the argument passed. The user should verify
        the correctness of the format passed.

        :param tag: the name of the tag to read
        :param counts: the number of element to read
        :param raw: the value should output as raw-value (hex)
        :return: None is returned in case of error otherwise the tag list is returned
        """
        if not self._target_is_connected:
            if not self._forward_open():
                self._LogixDriverLegacy__log.warning("Target did not connected. read_tag will not be executed.")
                raise DataError("Target did not connected. read_tag will not be executed.")
        offset = 0
        last_idx = 0
        # raw mode accumulates bytes; otherwise a list of (index, value) tuples
        tags = b'' if raw else []
        # offset == -1 is the sentinel returned by _parse_fragment once the
        # final fragment has been consumed
        while offset != -1:
            rp = self._create_tag_rp(tag)
            if rp is None:
                self._LogixDriverLegacy__log.warning(f"Cannot create tag {tag} request packet. read_tag will not be executed.")
                return
            # Read Tag Fragmented request: path + element count + running byte offset
            message_request = [
             pack_uint(self._sequence()),
             bytes([TAG_SERVICES_REQUEST["Read Tag Fragmented"]]),
             rp,
             pack_uint(counts),
             pack_dint(offset)]
            msg = self.build_common_packet_format((DATA_ITEM["Connected"]), ((b'').join(message_request)),
              (ADDRESS_ITEM["Connection Based"]),
              addr_data=(self._target_cid))
            success, reply = self.send_unit_data(msg)
            if not success:
                raise DataError(f"send_unit_data returned not valid data - {reply}")
            last_idx, offset = self._parse_fragment(reply, last_idx, offset, tags, raw)

        return tags
+
+    def _parse_fragment(self, reply, last_idx, offset, tags, raw=False):
+        """ parse the fragment returned by a fragment service."""
+        try:
+            status = _unit_data_status(reply)
+            data_type = unpack_uint(reply[REPLY_START[:REPLY_START + 2]])
+            fragment_returned = reply[(REPLY_START + 2)[:None]]
+        except Exception as e:
+            try:
+                raise DataError(e)
+            finally:
+                e = None
+                del e
+
+        fragment_returned_length = len(fragment_returned)
+        idx = 0
+        while idx < fragment_returned_length:
+            try:
+                typ = DATA_TYPE[data_type]
+                if raw:
+                    value = fragment_returned[idx[:idx + DATA_FUNCTION_SIZE[typ]]]
+                else:
+                    value = UNPACK_DATA_FUNCTION[typ](fragment_returned[idx[:idx + DATA_FUNCTION_SIZE[typ]]])
+                idx += DATA_FUNCTION_SIZE[typ]
+            except Exception as e:
+                try:
+                    raise DataError(e)
+                finally:
+                    e = None
+                    del e
+
+            if raw:
+                tags += value
+            else:
+                tags.append((last_idx, value))
+                last_idx += 1
+
+        if status == SUCCESS:
+            offset = -1
+        else:
+            if status == 6:
+                offset += fragment_returned_length
+            else:
+                self._LogixDriverLegacy__log.warning("{0}: {1}".format(get_service_status(status), get_extended_status(reply, 48)))
+                offset = -1
+        return (
+         last_idx, offset)
+
+    @staticmethod
+    def _prep_bools(tag, typ, bits_only=True):
+        """
+        if tag is a bool and a bit of an integer, returns the base tag and the bit value,
+        else returns the tag name and None
+
+        """
+        if typ != "BOOL":
+            return (
+             tag, None)
+            if not bits_only:
+                if tag.endswith("]"):
+                    try:
+                        base, idx = tag[None[:-1]].rsplit(sep="[", maxsplit=1)
+                        idx = int(idx)
+                        base = f"{base}[{idx // 32}]"
+                        return (base, idx)
+                    except Exception:
+                        return (
+                         tag, None)
+
+        else:
+            try:
+                base, bit = tag.rsplit(".", maxsplit=1)
+                bit = int(bit)
+                return (base, bit)
+            except Exception:
+                return (
+                 tag, None)
+
+    @staticmethod
+    def _dword_to_boolarray(tag, bit):
+        base, tmp = tag.rsplit(sep="[", maxsplit=1)
+        i = int(tmp[None[:-1]])
+        return f"{base}[{i * 32 + bit}]"
+
+    def _write_tag_multi_write(self, tags):
+        rp_list = [[]]
+        tags_added = [[]]
+        request_len = 0
+        for name, value, typ in tags:
+            name, bit = self._prep_bools(name, typ, bits_only=False)
+            rp = self._create_tag_rp(name, multi_requests=True)
+            if rp is None:
+                self._LogixDriverLegacy__log.warning(f"Cannot create tag {tags} req. packet. write_tag will not be executed")
+                return
+            try:
+                if bit is not None:
+                    rp = self._create_tag_rp(name, multi_requests=True)
+                    request = bytes([TAG_SERVICES_REQUEST["Read Modify Write Tag"]]) + rp
+                    request += (b'').join(self._make_write_bit_data(bit, value, bool_ary=("[" in name)))
+                    if typ == "BOOL" and name.endswith("]"):
+                        name = self._dword_to_boolarray(name, bit)
+                    else:
+                        name = f"{name}.{bit}"
+                else:
+                    request = bytes([TAG_SERVICES_REQUEST["Write Tag"]]) + rp + pack_uint(DATA_TYPE[typ]) + b'\x01\x00' + PACK_DATA_FUNCTION[typ](value)
+                tag_req_len = len(request) + MULTISERVICE_WRITE_OVERHEAD
+                if tag_req_len + request_len >= self.connection_size:
+                    rp_list.append([])
+                    tags_added.append([])
+                    request_len = 0
+                rp_list[-1].append(request)
+                request_len += tag_req_len
+            except (LookupError, struct.error) as e:
+                try:
+                    self._LogixDriverLegacy__warning(f"Tag:{name} type:{typ} removed from write list. Error:{e}.")
+                finally:
+                    e = None
+                    del e
+
+            else:
+                tags_added[-1].append((name, value, typ))
+
+        replies = []
+        for req_list, tags_ in zip(rp_list, tags_added):
+            message_request = self.build_multiple_service(req_list, self._sequence())
+            msg = self.build_common_packet_format((DATA_ITEM["Connected"]), ((b'').join(message_request)),
+              (ADDRESS_ITEM["Connection Based"]),
+              addr_data=(self._target_cid))
+            success, reply = self.send_unit_data(msg)
+            if success:
+                replies += self._parse_multiple_request_write(tags_, reply)
+            else:
+                raise DataError(f"send_unit_data returned not valid data - {reply}")
+
+        return replies
+
    def _write_tag_single_write(self, tag, value, typ):
        """Write a single tag, or a single bit of an integer tag.

        :param tag: tag name, possibly with a trailing '.bit' for bit-level writes
        :param value: the value to write
        :param typ: data type name (key into DATA_TYPE / PACK_DATA_FUNCTION)
        :return: a Tag result (reply attached as error when the write failed),
                 or None when the request packet could not be built
        """
        name, bit = self._prep_bools(tag, typ, bits_only=False)
        rp = self._create_tag_rp(name)
        if rp is None:
            self._LogixDriverLegacy__log.warning(f"Cannot create tag {tag} request packet. write_tag will not be executed.")
            return
        else:
            # bit writes use Read-Modify-Write with OR/AND masks; whole-tag
            # writes use the plain Write Tag service
            message_request = [pack_uint(self._sequence()),
             bytes([
              TAG_SERVICES_REQUEST["Read Modify Write Tag"] if bit is not None else TAG_SERVICES_REQUEST["Write Tag"]]),
             rp]
            if bit is not None:
                try:
                    message_request += self._make_write_bit_data(bit, value, bool_ary=("[" in name))
                except Exception as err:
                    try:
                        raise DataError(f"Unable to write bit, invalid bit number {repr(err)}")
                    finally:
                        err = None
                        del err

            else:
                # data type + element count (1) + packed value
                message_request += [
                 pack_uint(DATA_TYPE[typ]),
                 pack_uint(1),
                 PACK_DATA_FUNCTION[typ](value)]
        request = self.build_common_packet_format((DATA_ITEM["Connected"]), ((b'').join(message_request)), (ADDRESS_ITEM["Connection Based"]),
          addr_data=(self._target_cid))
        success, reply = self.send_unit_data(request)
        return Tag(tag, value, typ, None if success else reply)
+
+    @staticmethod
+    def _make_write_bit_data(bit, value, bool_ary=False):
+        or_mask, and_mask = (0, 4294967295L)
+        if bool_ary:
+            mask_size = 4
+            bit = bit % 32
+        else:
+            mask_size = 1 if bit < 8 else 2 if bit < 16 else 4
+        if value:
+            or_mask |= 1 << bit
+        else:
+            and_mask &= ~(1 << bit)
+        return [pack_uint(mask_size), pack_udint(or_mask)[None[:mask_size]], pack_udint(and_mask)[None[:mask_size]]]
+
+    @staticmethod
+    def _parse_multiple_request_write(tags, reply):
+        """ parse the message received from a multi request writ:
+
+        For each tag parsed, the information extracted includes the tag name and the status of the writing.
+        Those information are appended to the tag list as tuple
+
+        :return: the tag list
+        """
+        offset = 50
+        position = 50
+        try:
+            number_of_service_replies = unpack_uint(reply[offset[:offset + 2]])
+            tag_list = []
+            for index in range(number_of_service_replies):
+                position += 2
+                start = offset + unpack_uint(reply[position[:position + 2]])
+                general_status = unpack_usint(reply[(start + 2)[:start + 3]])
+                error = None if general_status == SUCCESS else get_service_status(general_status)
+                tag_list.append(Tag(*tags[index], *(error,)))
+
+            return tag_list
+        except Exception as e:
+            try:
+                raise DataError(e)
+            finally:
+                e = None
+                del e
+
+    def write_tag(self, tag, value=None, typ=None):
+        """ write tag/tags from a connected plc
+
+        Possible combination can be passed to this method:
+                - ('tag name', Value, data type)  as single parameters or inside a tuple
+                - ([('tag name', Value, data type), ('tag name2', Value, data type)]) as array of tuples
+
+        At the moment there is not a strong validation for the argument passed. The user should verify
+        the correctness of the format passed.
+
+        The type accepted are:
+            - BOOL
+            - SINT
+            - INT
+            - DINT
+            - REAL
+            - LINT
+            - BYTE
+            - WORD
+            - DWORD
+            - LWORD
+
+        :param tag: tag name, or an array of tuple containing (tag name, value, data type)
+        :param value: the value to write or none if tag is an array of tuple or a tuple
+        :param typ: the type of the tag to write or none if tag is an array of tuple or a tuple
+        :return: None is returned in case of error otherwise the tag list is returned
+        """
+        if not self._target_is_connected:
+            if not self._forward_open():
+                self._LogixDriverLegacy__log.warning("Target did not connected. write_tag will not be executed.")
+                raise DataError("Target did not connected. write_tag will not be executed.")
+        elif isinstance(tag, (list, tuple, GeneratorType)):
+            return self._write_tag_multi_write(tag)
+            if isinstance(tag, tuple):
+                name, value, typ = tag
+        else:
+            name = tag
+        return self._write_tag_single_write(name, value, typ)
+
+    def write_array(self, tag, values, data_type, raw=False):
+        """ write array of atomic data type from a connected plc
+        At the moment there is not a strong validation for the argument passed. The user should verify
+        the correctness of the format passed.
+        :param tag: the name of the tag to read
+        :param data_type: the type of tag to write
+        :param values: the array of values to write, if raw: the frame with bytes
+        :param raw: indicates that the values are given as raw values (hex)
+        """
+        if not isinstance(values, list):
+            self._LogixDriverLegacy__log.warning("A list of tags must be passed to write_array.")
+            raise DataError("A list of tags must be passed to write_array.")
+        if not self._target_is_connected:
+            if not self._forward_open():
+                self._LogixDriverLegacy__log.warning("Target did not connected. write_array will not be executed.")
+                raise DataError("Target did not connected. write_array will not be executed.")
+        array_of_values = b''
+        byte_size = 0
+        byte_offset = 0
+        for i, value in enumerate(values):
+            array_of_values += value if raw else PACK_DATA_FUNCTION[data_type](value)
+            byte_size += DATA_FUNCTION_SIZE[data_type]
+            if not byte_size >= 450:
+                if i == len(values) - 1:
+                    rp = self._create_tag_rp(tag)
+                    if rp is None:
+                        self._LogixDriverLegacy__log.warning(f"Cannot create tag {tag} request packet write_array will not be executed.")
+                        return
+                    message_request = [
+                     pack_uint(self._sequence()),
+                     bytes([TAG_SERVICES_REQUEST["Write Tag Fragmented"]]),
+                     bytes([len(rp) // 2]),
+                     rp,
+                     pack_uint(DATA_TYPE[data_type]),
+                     pack_uint(len(values)),
+                     pack_dint(byte_offset),
+                     array_of_values]
+                    byte_offset += byte_size
+                    msg = self.build_common_packet_format((DATA_ITEM["Connected"]),
+                      ((b'').join(message_request)),
+                      (ADDRESS_ITEM["Connection Based"]),
+                      addr_data=(self._target_cid))
+                    success, reply = self.send_unit_data(msg)
+                    if not success:
+                        raise DataError(f"send_unit_data returned not valid data - {reply}")
+                array_of_values = b''
+                byte_size = 0
+
+        return True
+
+    def write_string(self, tag, value, size=82):
+        """
+            Rockwell define different string size:
+                STRING  STRING_12   STRING_16   STRING_20   STRING_40   STRING_8
+            by default we assume size 82 (STRING)
+        """
+        data_tag = ".".join((tag, "DATA"))
+        len_tag = ".".join((tag, "LEN"))
+        data_to_send = [
+         0] * size
+        for idx, val in enumerate(value):
+            try:
+                unsigned = ord(val)
+                data_to_send[idx] = unsigned - 256 if unsigned > 127 else unsigned
+            except IndexError:
+                break
+
+        str_len = len(value)
+        if str_len > size:
+            str_len = size
+        result_len = self.write_tag(len_tag, str_len, "DINT")
+        result_data = self.write_array(data_tag, data_to_send, "SINT")
+        return result_data and result_len
+
    def read_string(self, tag, str_len=None):
        """Read a Rockwell STRING tag: fetch `tag`.LEN (unless str_len is
        given), then `tag`.DATA, and decode the bytes up to the first NUL.

        :param tag: the string tag name
        :param str_len: optional known length; skips reading `tag`.LEN
        :return: the decoded string, or None when the length/data read fails
        """
        data_tag = f"{tag}.DATA"
        if str_len is None:
            len_tag = f"{tag}.LEN"
            tmp = self.read_tag(len_tag)
            # NOTE(review): this assumes read_tag's result unpacks into exactly
            # two items; _read_tag_single builds Tag with 3-4 fields -- verify
            # against Tag's definition.
            length, _ = tmp or (None, None)
        else:
            length = str_len
        if length:
            values = self.read_array(data_tag, length)
            if values:
                _, values = zip(*values)
                # SINT values are signed; map negatives back to 0-255 before chr()
                chars = "".join((chr(v + 256) if v < 0 else chr(v) for v in values))
                string, *_ = chars.split("\x00", maxsplit=1)
                return string
+
    def _check_reply(self, reply):
        # Abstract hook: concrete drivers must parse `reply` and return None on
        # success or an error description; see send_unit_data() for its use.
        raise NotImplementedError("The method has not been implemented")
+
+    def nop(self):
+        """ No replay command
+
+        A NOP provides a way for either an originator or target to determine if the TCP connection is still open.
+        """
+        message = self.build_header(ENCAPSULATION_COMMAND["nop"], 0)
+        self._send(message)
+
+    def send_unit_data(self, message):
+        """ SendUnitData send encapsulated connected messages.
+
+        :param message: The message to be send to the target
+        :return: the replay received from the target
+        """
+        msg = self.build_header(ENCAPSULATION_COMMAND["send_unit_data"], len(message))
+        msg += message
+        self._send(msg)
+        reply = self._receive()
+        status = self._check_reply(reply)
+        if status is None:
+            return (True, reply)
+        return (False, status)
+
+    def build_header(self, command, length):
+        """ Build the encapsulate message header
+
+        The header is 24 bytes fixed length, and includes the command and the length of the optional data portion.
+
+         :return: the header
+        """
+        try:
+            h = command
+            h += pack_uint(length)
+            h += pack_dint(self._session)
+            h += pack_dint(0)
+            h += self.attribs["context"]
+            h += pack_dint(self.attribs["option"])
+            return h
+        except Exception as e:
+            try:
+                raise CommError(e)
+            finally:
+                e = None
+                del e
+
+    @staticmethod
+    def create_tag_rp(tag, multi_requests=False):
+        """ Create tag Request Packet
+
+        It returns the request packed wrapped around the tag passed.
+        If any error it returns none
+        """
+        tags = tag.encode().split(b'.')
+        rp = []
+        index = []
+        for tag in tags:
+            add_index = False
+            if b'[' in tag:
+                tag = tag[None[:len(tag) - 1]]
+                inside_value = tag[(tag.find(b'[') + 1)[:None]]
+                index = inside_value.split(b',')
+                add_index = True
+                tag = tag[None[:tag.find(b'[')]]
+            tag_length = len(tag)
+            rp.append(EXTENDED_SYMBOL)
+            rp.append(bytes([tag_length]))
+            rp += [bytes([char]) for char in tag]
+            if tag_length % 2:
+                rp.append(PADDING_BYTE)
+            if add_index:
+                for idx in index:
+                    val = int(idx)
+                    if val <= 255:
+                        rp.append(ELEMENT_TYPE["8-bit"])
+                        rp.append(pack_usint(val))
+                    elif val <= 65535:
+                        rp.append(ELEMENT_TYPE["16-bit"])
+                        rp.append(pack_uint(val))
+                    elif val <= 68719476735L:
+                        rp.append(ELEMENT_TYPE["32-bit"])
+                        rp.append(pack_dint(val))
+                    else:
+                        return
+
+        if multi_requests:
+            request_path = bytes([len(rp) // 2]) + (b'').join(rp)
+        else:
+            request_path = (b'').join(rp)
+        return request_path
+
+    @staticmethod
+    def build_common_packet_format(message_type, message, addr_type, addr_data=None, timeout=10):
+        """ build_common_packet_format
+
+        It creates the common part for a CIP message. Check Volume 2 (page 2.22) of CIP specification  for reference
+        """
+        msg = pack_dint(0)
+        msg += pack_uint(timeout)
+        msg += pack_uint(2)
+        msg += addr_type
+        if addr_data is not None:
+            msg += pack_uint(len(addr_data))
+            msg += addr_data
+        else:
+            msg += b'\x00\x00'
+        msg += message_type
+        msg += pack_uint(len(message))
+        msg += message
+        return msg
+
+    @staticmethod
+    def build_multiple_service(rp_list, sequence=None):
+        mr = [
+         bytes([TAG_SERVICES_REQUEST["Multiple Service Packet"]]),
+         pack_usint(2),
+         CLASS_TYPE["8-bit"],
+         CLASS_CODE["Message Router"],
+         INSTANCE_TYPE["8-bit"],
+         b'\x01',
+         pack_uint(len(rp_list))]
+        if sequence is not None:
+            mr.insert(0, pack_uint(sequence))
+        offset = len(rp_list) * 2 + 2
+        for index, rp in enumerate(rp_list):
+            mr.append(pack_uint(offset))
+            offset += len(rp)
+
+        mr += rp_list
+        return mr
+
+    @staticmethod
+    def parse_multiple_request(message, tags, typ):
+        """ parse_multi_request
+        This function should be used to parse the message replayed to a multi request service rapped around the
+        send_unit_data message.
+
+        :param message: the full message returned from the PLC
+        :param tags: The list of tags to be read
+        :param typ: to specify if multi request service READ or WRITE
+        :return: a list of tuple in the format [ (tag name, value, data type), ( tag name, value, data type) ].
+                 In case of error the tuple will be (tag name, None, None)
+        """
+        offset = 50
+        position = 50
+        number_of_service_replies = unpack_uint(message[offset[:offset + 2]])
+        tag_list = []
+        for index in range(number_of_service_replies):
+            position += 2
+            start = offset + unpack_uint(message[position[:position + 2]])
+            general_status = unpack_usint(message[(start + 2)[:start + 3]])
+            if general_status == 0:
+                if typ == "READ":
+                    data_type = unpack_uint(message[(start + 4)[:start + 6]])
+                    try:
+                        value_begin = start + 6
+                        value_end = value_begin + DATA_FUNCTION_SIZE[DATA_TYPE[data_type]]
+                        value = message[value_begin[:value_end]]
+                        tag_list.append((tags[index],
+                         UNPACK_DATA_FUNCTION[DATA_TYPE[data_type]](value),
+                         DATA_TYPE[data_type]))
+                    except LookupError:
+                        tag_list.append((tags[index], None, None))
+
+                else:
+                    tag_list.append(tags[index] + ('GOOD', ))
+            elif typ == "READ":
+                tag_list.append((tags[index], None, None))
+            else:
+                tag_list.append(tags[index] + ('BAD', ))
+
+        return tag_list
+
+
def _unit_data_status(reply):
    """Return the single-byte general status of a send_unit_data reply
    (byte 48 of the raw frame); fixed slice artifact reply[48[:49]].
    """
    return unpack_usint(reply[48:49])
diff --git a/APPS_UNCOMPILED/lib/pycomm3/com_server.py b/APPS_UNCOMPILED/lib/pycomm3/com_server.py
new file mode 100644
index 0000000..79d5a23
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/pycomm3/com_server.py
@@ -0,0 +1,55 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pycomm3/com_server.py
+# Compiled at: 2024-04-18 03:12:57
+# Size of source mod 2**32: 2908 bytes
# Module aborts at import time: this COM-server port is deliberately disabled.
raise NotImplementedError("COMServer implementation is incomplete")
import pythoncom
from pycomm3 import LogixDriver, Tag
# COM class id the server registers under (see LogixDriverCOMServer._reg_clsid_).
CLSID = "{7038d3a1-1ac4-4522-97d5-4c5a08a29906}"
+
class COMTag(Tag):
    """Tag result wrapper exposing its fields as read-only COM attributes."""
    _public_attrs_ = [
     "name", "value", "type", "error"]
    _readonly_attrs_ = _public_attrs_
+
+
class LogixDriverCOMServer:
    """Local COM server wrapping a pycomm3 LogixDriver.

    Register with ``python com_server.py --register`` (see register_COM_server).
    """
    _reg_clsctx_ = pythoncom.CLSCTX_LOCAL_SERVER
    # COM-visible methods must name actual methods of this class; the
    # decompiled list exposed 'read_tag', which does not exist (the method is
    # named 'read').
    _public_methods_ = ["open", "close", "read", "write"]
    _readonly_attrs_ = []  # fixed typo: was '_readonlu_attrs_'
    _public_attrs_ = []
    _reg_clsid_ = CLSID
    _reg_desc_ = "Pycomm3 - Python Ethernet/IP ControlLogix Library COM Server"
    _reg_progid_ = "Pycomm3.COMServer"

    def __init__(self):
        # underlying pycomm3 driver; created in open()
        self.plc = None

    def open(self, path, init_tags=True, init_program_tags=False, init_info=True):
        """Create and open a LogixDriver for the given CIP path."""
        self.plc = LogixDriver(path, init_tags=init_tags, init_program_tags=init_program_tags, init_info=init_info)
        self.plc.open()

    def close(self):
        """Close the underlying driver connection."""
        self.plc.close()

    def read(self, tag):
        """Read a single tag; returns its value, or None on failure."""
        result = self.plc.read(tag)
        if result:
            return result.value

    def write(self, *tag_values):
        """Write one or more (tag, value) pairs; returns the driver's result."""
        return self.plc.write(*tag_values)
+
+
def register_COM_server():
    """(Un)register the COM server when --register/--unregister is on argv."""
    import sys
    if {"--register", "--unregister"} & set(sys.argv):
        import win32com.server.register
        win32com.server.register.UseCommandLine(LogixDriverCOMServer)
+
+
if __name__ == "__main__":
    # command-line entry point: handles --register / --unregister
    register_COM_server()
diff --git a/APPS_UNCOMPILED/lib/pycomm3/const.py b/APPS_UNCOMPILED/lib/pycomm3/const.py
new file mode 100644
index 0000000..7448d26
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/pycomm3/const.py
@@ -0,0 +1,1661 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pycomm3/const.py
+# Compiled at: 2024-04-18 03:12:57
+# Size of source mod 2**32: 56928 bytes
+from .map import EnumMap
+from .bytes_ import Pack, Unpack
# --- framing sizes / offsets ---
HEADER_SIZE = 24  # encapsulation header is 24 bytes fixed length
MULTISERVICE_READ_OVERHEAD = 6
READ_RESPONSE_OVERHEAD = 10
# minimum controller firmware versions gating optional features
MIN_VER_INSTANCE_IDS = 21
MIN_VER_LARGE_CONNECTIONS = 20
MIN_VER_EXTERNAL_ACCESS = 18
MICRO800_PREFIX = "2080"  # catalog-number prefix -- presumably Micro800 family; verify
EXTENDED_SYMBOL = b'\x91'
# CIP general status values
SUCCESS = 0
INSUFFICIENT_PACKETS = 6  # reply continues in another packet
OFFSET_MESSAGE_REQUEST = 40
PAD = b'\x00'
PRIORITY = b'\n'
TIMEOUT_TICKS = b'\x05'
TIMEOUT_MULTIPLIER = b'\x07'
TRANSPORT_CLASS = b'\xa3'
BASE_TAG_BIT = 67108864  # 1 << 26
SEC_TO_US = 1000000  # seconds -> microseconds
TEMPLATE_MEMBER_INFO_LEN = 8
STRUCTURE_READ_REPLY = b'\xa0\x02'
# --- SLC / PCCC legacy protocol constants ---
SLC_CMD_CODE = b'\x0f'
SLC_CMD_REPLY_CODE = b'O'
SLC_FNC_READ = b'\xa2'
SLC_FNC_WRITE = b'\xaa'
SLC_REPLY_START = 61
PCCC_PATH = b'g$\x01'
# Logical-segment type prefixes, keyed by name or by encoded size.
ELEMENT_TYPE = {
 '8-bit': b'(',
 '16-bit': b')\x00',
 '32-bit': b'*\x00\x00\x00',
 1: b'(',
 2: b')\x00',
 3: b'*\x00\x00\x00'}
CLASS_TYPE = {
 '8-bit': b' ',
 '16-bit': b'!\x00',
 1: b' ',
 2: b'!\x00'}
INSTANCE_TYPE = {
 '8-bit': b'$',
 '16-bit': b'%\x00',
 1: b'$',
 2: b'%\x00'}
ATTRIBUTE_TYPE = {
 '8-bit': b'0',
 '16-bit': b'1\x00',
 1: b'0',
 2: b'1\x00'}
# Port numbers used when building a routing path from a textual description.
PATH_SEGMENTS = {
 'backplane': 1,
 'bp': 1,
 'enet': 2,
 'dhrio-a': 2,
 'dhrio-b': 3,
 'dnet': 2,
 'cnet': 2,
 'dh485-a': 2,
 'dh485-b': 3}
+
class ConnectionManagerService(EnumMap):
    # Service codes addressed to the Connection Manager object.
    # Each value is the single service-code byte.
    forward_close = b'N'  # 0x4e
    unconnected_send = b'R'  # 0x52
    forward_open = b'T'  # 0x54
    get_connection_data = b'V'  # 0x56
    search_connection_data = b'W'  # 0x57
    get_connection_owner = b'Z'  # 0x5a
    large_forward_open = b'['  # 0x5b
+
+
class ConnectionManagerInstance(EnumMap):
    # Connection Manager instance numbers, one byte each.
    open_request = b'\x01'
    open_format_rejected = b'\x02'
    open_resource_rejected = b'\x03'
    open_other_rejected = b'\x04'
    close_request = b'\x05'
    close_format_request = b'\x06'
    close_other_request = b'\x07'
    connection_timeout = b'\x08'
+
+
class CommonService(EnumMap):
    # Common service codes usable against most object classes (one byte).
    get_attributes_all = b'\x01'
    set_attributes_all = b'\x02'
    get_attribute_list = b'\x03'
    set_attribute_list = b'\x04'
    reset = b'\x05'
    start = b'\x06'
    stop = b'\x07'
    create = b'\x08'
    delete = b'\t'  # 0x09
    multiple_service_request = b'\n'  # 0x0a
    apply_attributes = b'\r'  # 0x0d
    get_attribute_single = b'\x0e'
    set_attribute_single = b'\x10'
    find_next_object_instance = b'\x11'
    error_response = b'\x14'
    restore = b'\x15'
    save = b'\x16'
    nop = b'\x17'
    get_member = b'\x18'
    set_member = b'\x19'
    insert_member = b'\x1a'
    remove_member = b'\x1b'
    group_sync = b'\x1c'
+
+
class EncapsulationCommand(EnumMap):
    # Encapsulation command codes, two bytes each (low byte first —
    # presumed little-endian from the trailing zero byte; confirm).
    nop = b'\x00\x00'
    list_targets = b'\x01\x00'
    list_services = b'\x04\x00'
    list_identity = b'c\x00'  # 0x0063
    list_interfaces = b'd\x00'  # 0x0064
    register_session = b'e\x00'  # 0x0065
    unregister_session = b'f\x00'  # 0x0066
    send_rr_data = b'o\x00'  # 0x006f
    send_unit_data = b'p\x00'  # 0x0070
+
+
class ClassCode(EnumMap):
    # Object class codes (single byte).
    identity_object = b'\x01'
    message_router = b'\x02'
    symbol_object = b'k'  # 0x6b — Logix symbol (tag) object
    template_object = b'l'  # 0x6c — Logix template (UDT definition) object
    connection_manager = b'\x06'
    program_name = b'd'  # 0x64
    wall_clock_time = b'\x8b'
    tcpip = b'\xf5'
    ethernet_link = b'\xf6'
    modbus_serial_link = b'F'  # 0x46
+
+
# Pre-built path to the Message Router object (class 0x02, instance 1),
# assembled from 8-bit class and instance segments.
MSG_ROUTER_PATH = b''.join((CLASS_TYPE['8-bit'], ClassCode.message_router, INSTANCE_TYPE['8-bit'], b'\x01'))
+
class TagService(EnumMap):
    # Logix tag-access service codes (request direction, one byte each).
    read_tag = b'L'  # 0x4c
    read_tag_fragmented = b'R'  # 0x52
    write_tag = b'M'  # 0x4d
    write_tag_fragmented = b'S'  # 0x53
    read_modify_write = b'N'  # 0x4e
    get_instance_attribute_list = b'U'  # 0x55

    @classmethod
    def from_reply(cls, reply_service):
        # A reply service byte is the request's code plus 128 (high bit
        # set); strip it to map the reply back to the request service.
        request_code = Unpack.usint(reply_service) - 128
        return cls.get(Pack.usint(request_code))
+
+
# Services whose replies may span multiple packets and therefore require
# the caller to reassemble fragments.
MULTI_PACKET_SERVICES = {
 TagService.read_tag_fragmented,
 TagService.write_tag_fragmented,
 TagService.get_instance_attribute_list,
 CommonService.multiple_service_request,
 CommonService.get_attribute_list}
+
class DataItem(EnumMap):
    # Common Packet Format data-item type IDs (two bytes each).
    connected = b'\xb1\x00'  # connected transport packet
    unconnected = b'\xb2\x00'  # unconnected message
+
+
class AddressItem(EnumMap):
    # Common Packet Format address-item type IDs (two bytes each).
    connection = b'\xa1\x00'  # connection-based addressing
    null = b'\x00\x00'  # null address (unconnected messaging)
    uccm = b'\x00\x00'  # alias of null, used for UCMM-style requests
+
+
class DataTypeSize(EnumMap):
    # Encoded size in bytes of each elementary data type.
    bool = 1
    sint = 1
    usint = 1
    byte = 1
    int = 2
    uint = 2
    word = 2
    dint = 4
    udint = 4
    real = 4
    dword = 4
    lint = 8
    ulint = 8
    lword = 8
    short_string = 84  # fixed-size string slot, 84 bytes total
+
+
class DataType(EnumMap):
    # Elementary data type codes (0xC1-0xDE), keyed by lower-case name.
    # _return_caps_only_ presumably makes reverse lookups return the
    # upper-cased type name — confirm against EnumMap's implementation.
    _return_caps_only_ = True
    bool = 193  # 0xc1
    sint = 194
    int = 195
    dint = 196
    lint = 197
    usint = 198
    uint = 199
    udint = 200
    ulint = 201
    real = 202
    lreal = 203
    stime = 204
    date = 205
    time_of_day = 206
    date_and_time = 207
    string = 208
    byte = 209
    word = 210
    dword = 211
    lword = 212
    string2 = 213
    ftime = 214
    ltime = 215
    itime = 216
    stringn = 217
    short_string = 218
    time = 219
    epath = 220
    engunit = 221
    stringi = 222  # 0xde
+
+
# Tag ExternalAccess attribute value -> description.  The embedded double
# quotes are part of the strings, matching the other tables in this module.
EXTERNAL_ACCESS = {
 0: '"Read/Write"', 
 1: '"Reserved"', 
 2: '"Read Only"', 
 3: '"None"'}
# Device state code -> description.  Codes 6-254 are reserved; 255 is the
# default value reported by the Get_Attributes_All service.
STATES = {
    0: '"Nonexistent"',
    1: '"Device Self Testing"',
    2: '"Standby"',
    3: '"Operational"',
    4: '"Major Recoverable Fault"',
    5: '"Major Unrecoverable Fault"',
}
STATES.update((code, "Reserved") for code in range(6, 255))
STATES[255] = "Default for Get_Attributes_All service"
# Keyswitch position decoded from two status values: the outer key is the
# first status byte, the inner key the second.  (Byte meanings presumed
# from the nesting — confirm against the identity-reply parser.)
KEYSWITCH = {96:{
  16: '"RUN"', 
  17: '"RUN"', 
  48: '"REMOTE RUN"', 
  49: '"REMOTE RUN"'}, 
 112:{
  32: '"PROG"', 
  33: '"PROG"', 
  48: '"REMOTE PROG"', 
  49: '"REMOTE PROG"'}}
# Encapsulation status code -> description.
STATUS = {
 0: '"Success"', 
 1: '"The sender issued an invalid or unsupported encapsulation command"', 
 2: '"Insufficient memory"', 
 3: '"Poorly formed or incorrect data in the data portion"', 
 100: '"An originator used an invalid session handle when sending an encapsulation message to the target"', 
 101: '"The target received a message of invalid length"', 
 105: '"Unsupported Protocol Version"'}
# General service status code -> description.  Entries that mention
# "extended status" are refined further via EXTEND_CODES.
SERVICE_STATUS = {
 1: '"Connection failure (see extended status)"', 
 2: '"Insufficient resource"', 
 3: '"Invalid value"', 
 4: '"IOI syntax error. A syntax error was detected decoding the Request Path (see extended status)"', 
 5: '"Destination unknown, class unsupported, instance undefined or structure element undefined (see extended status)"', 
 6: '"Insufficient Packet Space"', 
 7: '"Connection lost"', 
 8: '"Service not supported"', 
 9: '"Error in data segment or invalid attribute value"', 
 10: '"Attribute list error"', 
 11: '"State already exist"', 
 12: '"Object state conflict"', 
 13: '"Object already exist"', 
 14: '"Attribute not settable"', 
 15: '"Permission denied"', 
 16: '"Device state conflict"', 
 17: '"Reply data too large"', 
 18: '"Fragmentation of a primitive value"', 
 19: '"Insufficient command data"', 
 20: '"Attribute not supported"', 
 21: '"Too much data"', 
 22: '"Object does not exist"', 
 26: '"Bridge request too large"', 
 27: '"Bridge response too large"', 
 28: '"Attribute list shortage"', 
 29: '"Invalid attribute list"', 
 30: '"Request service error"', 
 31: '"Connection related failure (see extended status)"', 
 34: '"Invalid reply received"', 
 37: '"Key segment error"', 
 38: '"Invalid IOI error"', 
 39: '"Unexpected attribute in list"', 
 40: '"DeviceNet error - invalid member ID"', 
 41: '"DeviceNet error - member not settable"', 
 209: '"Module not in run state"', 
 251: '"Message port not supported"', 
 252: '"Message unsupported data type"', 
 253: '"Message uninitialized"', 
 254: '"Message timeout"', 
 255: '"General Error (see extended status)"'}
# Extended status descriptions, keyed first by the general status code
# (see SERVICE_STATUS) and then by the extended status value.
EXTEND_CODES = {1:{
  256: '"Connection in use"', 
  259: '"Transport not supported"', 
  262: '"Ownership conflict"', 
  263: '"Connection not found"', 
  264: '"Invalid connection type"', 
  265: '"Invalid connection size"', 
  272: '"Module not configured"', 
  273: '"EPR not supported"', 
  276: '"Wrong module"', 
  277: '"Wrong device type"', 
  278: '"Wrong revision"', 
  280: '"Invalid configuration format"', 
  282: '"Application out of connections"', 
  515: '"Connection timeout"', 
  516: '"Unconnected message timeout"', 
  517: '"Unconnected send parameter error"', 
  518: '"Message too large"', 
  769: '"No buffer memory"', 
  770: '"Bandwidth not available"', 
  771: '"No screeners available"', 
  773: '"Signature match"', 
  785: '"Port not available"', 
  786: '"Link address not available"', 
  789: '"Invalid segment type"', 
  791: '"Connection not scheduled"'}, 
 4:{0:"Extended status out of memory", 
  1:"Extended status out of instances"}, 
 5:{0:"Extended status out of memory", 
  1:"Extended status out of instances"}, 
 31:{515: "Connection timeout"}, 
 255:{
  7: '"Wrong data type"', 
  8193: '"Excessive IOI"', 
  8194: '"Bad parameter value"', 
  8216: '"Semaphore reject"', 
  8219: '"Size too small"', 
  8220: '"Invalid size"', 
  8448: '"Privilege failure"', 
  8449: '"Invalid keyswitch position"', 
  8450: '"Password invalid"', 
  8451: '"No password issued"', 
  8452: '"Address out of range"', 
  8453: '"Access beyond end of the object"', 
  8454: '"Data in use"', 
  8455: '"Tag type used in request does not match the target tag\'s data type"', 
  8456: '"Controller in upload or download mode"', 
  8457: '"Attempt to change number of array dimensions"', 
  8458: '"Invalid symbol name"', 
  8459: '"Symbol does not exist"', 
  8462: '"Search failed"', 
  8463: '"Task cannot start"', 
  8464: '"Unable to write"', 
  8465: '"Unable to read"', 
  8466: '"Shared routine not editable"', 
  8467: '"Controller in faulted mode"', 
  8468: '"Run mode inhibited"'}}
# PCCC file-type letter -> type byte used on the wire.
PCCC_DATA_TYPE = {
 'N': b'\x89', 
 'B': b'\x85', 
 'T': b'\x86', 
 'C': b'\x87', 
 'S': b'\x84', 
 'F': b'\x8a', 
 'ST': b'\x8d', 
 'A': b'\x8e', 
 'R': b'\x88', 
 'O': b'\x8b', 
 'I': b'\x8c'}
# PCCC file-type letter -> element size in bytes.
PCCC_DATA_SIZE = {
 'N': 2, 
 'B': 2, 
 'T': 6, 
 'C': 6, 
 'S': 2, 
 'F': 4, 
 'ST': 84, 
 'A': 2, 
 'R': 6, 
 'O': 2, 
 'I': 2}
# Timer/counter member name -> sub-element number (PRE/ACC appear to be
# word offsets, the remainder bit positions in the control word — confirm).
PCCC_CT = {
 'PRE': 1, 
 'ACC': 2, 
 'EN': 15, 
 'TT': 14, 
 'DN': 13, 
 'CU': 15, 
 'CD': 14, 
 'OV': 12, 
 'UN': 11, 
 'UA': 10}
# PCCC/DF1 error code -> human-readable description.  Negative codes are
# local driver-side failures; non-negative codes come from the remote
# node's STS byte.  The embedded double quotes are part of the strings,
# consistent with the other status tables in this module.
PCCC_ERROR_CODE = {
 -2: '"Not Acknowledged (NAK)"', 
 -3: '"No Response, Check COM Settings"',  # fixed typo: "Reponse" -> "Response"
 -4: '"Unknown Message from DataLink Layer"', 
 -5: '"Invalid Address"', 
 -6: '"Could Not Open Com Port"', 
 -7: '"No data specified to data link layer"', 
 -8: '"No data returned from PLC"', 
 -20: '"No Data Returned"', 
 16: '"Illegal Command or Format, Address may not exist or not enough elements in data file"', 
 32: '"PLC Has a Problem and Will Not Communicate"', 
 48: '"Remote Node Host is Missing, Disconnected, or Shut Down"', 
 64: '"Host Could Not Complete Function Due To Hardware Fault"', 
 80: '"Addressing problem or Memory Protect Rungs"', 
 96: '"Function not allowed due to command protection selection"',  # fixed grammar: "not allows" -> "not allowed"
 112: '"Processor is in Program mode"', 
 128: '"Compatibility mode file missing or communication zone problem"', 
 144: '"Remote node cannot buffer command"', 
 240: '"Error code in EXT STS Byte"'}
# Device product-type code -> description, used when decoding identity
# replies.
PRODUCT_TYPES = {
 0: '"Generic Device (deprecated)"', 
 2: '"AC Drive"', 
 3: '"Motor Overload"', 
 4: '"Limit Switch"', 
 5: '"Inductive Proximity Switch"', 
 6: '"Photoelectric Sensor"', 
 7: '"General Purpose Discrete I/O"', 
 9: '"Resolver"', 
 12: '"Communications Adapter"', 
 14: '"Programmable Logic Controller"', 
 16: '"Position Controller"', 
 19: '"DC Drive"', 
 21: '"Contactor"', 
 22: '"Motor Starter"', 
 23: '"Soft Start"', 
 24: '"Human-Machine Interface"', 
 26: '"Mass Flow Controller"', 
 27: '"Pneumatic Valve"', 
 28: '"Vacuum Pressure Gauge"', 
 29: '"Process Control Value"', 
 30: '"Residual Gas Analyzer"', 
 31: '"DC Power Generator"', 
 32: '"RF Power Generator"', 
 33: '"Turbomolecular Vacuum Pump"', 
 34: '"Encoder"', 
 35: '"Safety Discrete I/O Device"', 
 36: '"Fluid Flow Controller"', 
 37: '"CIP Motion Drive"', 
 38: '"CompoNet Repeater"', 
 39: '"Mass Flow Controller, Enhanced"', 
 40: '"CIP Modbus Device"', 
 41: '"CIP Modbus Translator"', 
 42: '"Safety Analog I/O Device"', 
 43: '"Generic Device (keyable)"', 
 44: '"Managed Switch"', 
 50: '"ControlNet Physical Layer Component"'}
+VENDORS = {
+ 0: '"Reserved"', 
+ 1: '"Rockwell Automation/Allen-Bradley"', 
+ 2: '"Namco Controls Corp."', 
+ 3: '"Honeywell Inc."', 
+ 4: '"Parker Hannifin Corp. (Veriflo Division)"', 
+ 5: '"Rockwell Automation/Reliance Elec."', 
+ 6: '"Reserved"', 
+ 7: '"SMC Corporation"', 
+ 8: '"Molex Incorporated"', 
+ 9: '"Western Reserve Controls Corp."', 
+ 10: '"Advanced Micro Controls Inc. (AMCI)"', 
+ 11: '"ASCO Pneumatic Controls"', 
+ 12: '"Banner Engineering Corp."', 
+ 13: '"Belden Wire & Cable Company"', 
+ 14: '"Cooper Interconnect"', 
+ 15: '"Reserved"', 
+ 16: '"Daniel Woodhead Co. (Woodhead Connectivity)"', 
+ 17: '"Dearborn Group Inc."', 
+ 18: '"Reserved"', 
+ 19: '"Helm Instrument Company"', 
+ 20: '"Huron Net Works"', 
+ 21: '"Lumberg, Inc."', 
+ 22: '"Online Development Inc.(Automation Value)"', 
+ 23: '"Vorne Industries, Inc."', 
+ 24: '"ODVA Special Reserve"', 
+ 25: '"Reserved"', 
+ 26: '"Festo Corporation"', 
+ 27: '"Reserved"', 
+ 28: '"Reserved"', 
+ 29: '"Reserved"', 
+ 30: '"Unico, Inc."', 
+ 31: '"Ross Controls"', 
+ 32: '"Reserved"', 
+ 33: '"Reserved"', 
+ 34: '"Hohner Corp."', 
+ 35: '"Micro Mo Electronics, Inc."', 
+ 36: '"MKS Instruments, Inc."', 
+ 37: '"Yaskawa Electric America formerly Magnetek Drives"', 
+ 38: '"Reserved"', 
+ 39: '"AVG Automation (Uticor)"', 
+ 40: '"Wago Corporation"', 
+ 41: '"Kinetics (Unit Instruments)"', 
+ 42: '"IMI Norgren Limited"', 
+ 43: '"BALLUFF, Inc."', 
+ 44: '"Yaskawa Electric America, Inc."', 
+ 45: '"Eurotherm Controls Inc"', 
+ 46: '"ABB Industrial Systems"', 
+ 47: '"Omron Corporation"', 
+ 48: '"TURCk, Inc."', 
+ 49: '"Grayhill Inc."', 
+ 50: '"Real Time Automation (C&ID)"', 
+ 51: '"Reserved"', 
+ 52: '"Numatics, Inc."', 
+ 53: '"Lutze, Inc."', 
+ 54: '"Reserved"', 
+ 55: '"Reserved"', 
+ 56: '"Softing GmbH"', 
+ 57: '"Pepperl + Fuchs"', 
+ 58: '"Spectrum Controls, Inc."', 
+ 59: '"D.I.P. Inc. MKS Inst."', 
+ 60: '"Applied Motion Products, Inc."', 
+ 61: '"Sencon Inc."', 
+ 62: '"High Country Tek"', 
+ 63: '"SWAC Automation Consult GmbH"', 
+ 64: '"Clippard Instrument Laboratory"', 
+ 65: '"Reserved"', 
+ 66: '"Reserved"', 
+ 67: '"Reserved"', 
+ 68: '"Eaton Electrical"', 
+ 69: '"Reserved"', 
+ 70: '"Reserved"', 
+ 71: '"Toshiba International Corp."', 
+ 72: '"Control Technology Incorporated"', 
+ 73: '"TCS (NZ) Ltd."', 
+ 74: '"Hitachi, Ltd."', 
+ 75: '"ABB Robotics Products AB"', 
+ 76: '"NKE Corporation"', 
+ 77: '"Rockwell Software, Inc."', 
+ 78: '"Escort Memory Systems (A Datalogic Group Co.)"', 
+ 79: '"Reserved"', 
+ 80: '"Industrial Devices Corporation"', 
+ 81: '"IXXAT Automation GmbH"', 
+ 82: '"Mitsubishi Electric Automation, Inc."', 
+ 83: '"OPTO-22"', 
+ 84: '"Reserved"', 
+ 85: '"Reserved"', 
+ 86: '"Horner Electric"', 
+ 87: '"Burkert Werke GmbH & Co. KG"', 
+ 88: '"Reserved"', 
+ 89: '"Industrial Indexing Systems, Inc."', 
+ 90: '"HMS Industrial Networks AB"', 
+ 91: '"Robicon"', 
+ 92: '"Helix Technology (Granville-Phillips)"', 
+ 93: '"Arlington Laboratory"', 
+ 94: '"Advantech Co. Ltd."', 
+ 95: '"Square D Company"', 
+ 96: '"Digital Electronics Corp."', 
+ 97: '"Danfoss"', 
+ 98: '"Reserved"', 
+ 99: '"Reserved"', 
+ 100: '"Bosch Rexroth Corporation, Pneumatics"', 
+ 101: '"Applied Materials, Inc."', 
+ 102: '"Showa Electric Wire & Cable Co."', 
+ 103: '"Pacific Scientific (API Controls Inc.)"', 
+ 104: '"Sharp Manufacturing Systems Corp."', 
+ 105: '"Olflex Wire & Cable, Inc."', 
+ 106: '"Reserved"', 
+ 107: '"Unitrode"', 
+ 108: '"Beckhoff Automation GmbH"', 
+ 109: '"National Instruments"', 
+ 110: '"Mykrolis Corporations (Millipore)"', 
+ 111: '"International Motion Controls Corp."', 
+ 112: '"Reserved"', 
+ 113: '"SEG Kempen GmbH"', 
+ 114: '"Reserved"', 
+ 115: '"Reserved"', 
+ 116: '"MTS Systems Corp."', 
+ 117: '"Krones, Inc"', 
+ 118: '"Reserved"', 
+ 119: '"EXOR Electronic R & D"', 
+ 120: '"SIEI S.p.A."', 
+ 121: '"KUKA Roboter GmbH"', 
+ 122: '"Reserved"', 
+ 123: '"SEC (Samsung Electronics Co., Ltd)"', 
+ 124: '"Binary Electronics Ltd"', 
+ 125: '"Flexible Machine Controls"', 
+ 126: '"Reserved"', 
+ 127: '"ABB Inc. (Entrelec)"', 
+ 128: '"MAC Valves, Inc."', 
+ 129: '"Auma Actuators Inc"', 
+ 130: '"Toyoda Machine Works, Ltd"', 
+ 131: '"Reserved"', 
+ 132: '"Reserved"', 
+ 133: '"Balogh T.A.G., Corporation"', 
+ 134: '"TR Systemtechnik GmbH"', 
+ 135: '"UNIPULSE Corporation"', 
+ 136: '"Reserved"', 
+ 137: '"Reserved"', 
+ 138: '"Conxall Corporation Inc."', 
+ 139: '"Reserved"', 
+ 140: '"Reserved"', 
+ 141: '"Kuramo Electric Co., Ltd."', 
+ 142: '"Creative Micro Designs"', 
+ 143: '"GE Industrial Systems"', 
+ 144: '"Leybold Vacuum GmbH"', 
+ 145: '"Siemens Energy & Automation/Drives"', 
+ 146: '"Kodensha Ltd"', 
+ 147: '"Motion Engineering, Inc."', 
+ 148: '"Honda Engineering Co., Ltd"', 
+ 149: '"EIM Valve Controls"', 
+ 150: '"Melec Inc."', 
+ 151: '"Sony Manufacturing Systems Corporation"', 
+ 152: '"North American Mfg."', 
+ 153: '"WATLOW"', 
+ 154: '"Japan Radio Co., Ltd"', 
+ 155: '"NADEX Co., Ltd"', 
+ 156: '"Ametek Automation & Process Technologies"', 
+ 157: '"Reserved"', 
+ 158: '"KVASER AB"', 
+ 159: '"IDEC IZUMI Corporation"', 
+ 160: '"Mitsubishi Heavy Industries Ltd"', 
+ 161: '"Mitsubishi Electric Corporation"', 
+ 162: '"Horiba-STEC Inc."', 
+ 163: '"esd electronic system design gmbh"', 
+ 164: '"DAIHEN Corporation"', 
+ 165: '"Tyco Valves & Controls/Keystone"', 
+ 166: '"EBARA Corporation"', 
+ 167: '"Reserved"', 
+ 168: '"Reserved"', 
+ 169: '"Hokuyo Electric Co. Ltd"', 
+ 170: '"Pyramid Solutions, Inc."', 
+ 171: '"Denso Wave Incorporated"', 
+ 172: '"HLS Hard-Line Solutions Inc"', 
+ 173: '"Caterpillar, Inc."', 
+ 174: '"PDL Electronics Ltd."', 
+ 175: '"Reserved"', 
+ 176: '"Red Lion Controls"', 
+ 177: '"ANELVA Corporation"', 
+ 178: '"Toyo Denki Seizo KK"', 
+ 179: '"Sanyo Denki Co., Ltd"', 
+ 180: '"Advanced Energy Japan K.K. (Aera Japan)"', 
+ 181: '"Pilz GmbH & Co"', 
+ 182: '"Marsh Bellofram-Bellofram PCD Division"', 
+ 183: '"Reserved"', 
+ 184: '"M-SYSTEM Co. Ltd"', 
+ 185: '"Nissin Electric Co., Ltd"', 
+ 186: '"Hitachi Metals Ltd."', 
+ 187: '"Oriental Motor Company"', 
+ 188: '"A&D Co., Ltd"', 
+ 189: '"Phasetronics, Inc."', 
+ 190: '"Cummins Engine Company"', 
+ 191: '"Deltron Inc."', 
+ 192: '"Geneer Corporation"', 
+ 193: '"Anatol Automation, Inc."', 
+ 194: '"Reserved"', 
+ 195: '"Reserved"', 
+ 196: '"Medar, Inc."', 
+ 197: '"Comdel Inc."', 
+ 198: '"Advanced Energy Industries, Inc"', 
+ 199: '"Reserved"', 
+ 200: '"DAIDEN Co., Ltd"', 
+ 201: '"CKD Corporation"', 
+ 202: '"Toyo Electric Corporation"', 
+ 203: '"Reserved"', 
+ 204: '"AuCom Electronics Ltd"', 
+ 205: '"Shinko Electric Co., Ltd"', 
+ 206: '"Vector Informatik GmbH"', 
+ 207: '"Reserved"', 
+ 208: '"Moog Inc."', 
+ 209: '"Contemporary Controls"', 
+ 210: '"Tokyo Sokki Kenkyujo Co., Ltd"', 
+ 211: '"Schenck-AccuRate, Inc."', 
+ 212: '"The Oilgear Company"', 
+ 213: '"Reserved"', 
+ 214: '"ASM Japan K.K."', 
+ 215: '"HIRATA Corp."', 
+ 216: '"SUNX Limited"', 
+ 217: '"Meidensha Corp."', 
+ 218: '"NIDEC SANKYO CORPORATION (Sankyo Seiki Mfg. Co., Ltd)"', 
+ 219: '"KAMRO Corp."', 
+ 220: '"Nippon System Development Co., Ltd"', 
+ 221: '"EBARA Technologies Inc."', 
+ 222: '"Reserved"', 
+ 223: '"Reserved"', 
+ 224: '"SG Co., Ltd"', 
+ 225: '"Vaasa Institute of Technology"', 
+ 226: '"MKS Instruments (ENI Technology)"', 
+ 227: '"Tateyama System Laboratory Co., Ltd."', 
+ 228: '"QLOG Corporation"', 
+ 229: '"Matric Limited Inc."', 
+ 230: '"NSD Corporation"', 
+ 231: '"Reserved"', 
+ 232: '"Sumitomo Wiring Systems, Ltd"', 
+ 233: '"Group 3 Technology Ltd"', 
+ 234: '"CTI Cryogenics"', 
+ 235: '"POLSYS CORP"', 
+ 236: '"Ampere Inc."', 
+ 237: '"Reserved"', 
+ 238: '"Simplatroll Ltd"', 
+ 239: '"Reserved"', 
+ 240: '"Reserved"', 
+ 241: '"Leading Edge Design"', 
+ 242: '"Humphrey Products"', 
+ 243: '"Schneider Automation, Inc."', 
+ 244: '"Westlock Controls Corp."', 
+ 245: '"Nihon Weidmuller Co., Ltd"', 
+ 246: '"Brooks Instrument (Div. of Emerson)"', 
+ 247: '"Reserved"', 
+ 248: '" Moeller GmbH"', 
+ 249: '"Varian Vacuum Products"', 
+ 250: '"Yokogawa Electric Corporation"', 
+ 251: '"Electrical Design Daiyu Co., Ltd"', 
+ 252: '"Omron Software Co., Ltd"', 
+ 253: '"BOC Edwards"', 
+ 254: '"Control Technology Corporation"', 
+ 255: '"Bosch Rexroth"', 
+ 256: '"Turck"', 
+ 257: '"Control Techniques PLC"', 
+ 258: '"Hardy Instruments, Inc."', 
+ 259: '"LS Industrial Systems"', 
+ 260: '"E.O.A. Systems Inc."', 
+ 261: '"Reserved"', 
+ 262: '"New Cosmos Electric Co., Ltd."', 
+ 263: '"Sense Eletronica LTDA"', 
+ 264: '"Xycom, Inc."', 
+ 265: '"Baldor Electric"', 
+ 266: '"Reserved"', 
+ 267: '"Patlite Corporation"', 
+ 268: '"Reserved"', 
+ 269: '"Mogami Wire & Cable Corporation"', 
+ 270: '"Welding Technology Corporation (WTC)"', 
+ 271: '"Reserved"', 
+ 272: '"Deutschmann Automation GmbH"', 
+ 273: '"ICP Panel-Tec Inc."', 
+ 274: '"Bray Controls USA"', 
+ 275: '"Reserved"', 
+ 276: '"Status Technologies"', 
+ 277: '"Trio Motion Technology Ltd"', 
+ 278: '"Sherrex Systems Ltd"', 
+ 279: '"Adept Technology, Inc."', 
+ 280: '"Spang Power Electronics"', 
+ 281: '"Reserved"', 
+ 282: '"Acrosser Technology Co., Ltd"', 
+ 283: '"Hilscher GmbH"', 
+ 284: '"IMAX Corporation"', 
+ 285: '"Electronic Innovation, Inc. (Falter Engineering)"', 
+ 286: '"Netlogic Inc."', 
+ 287: '"Bosch Rexroth Corporation, Indramat"', 
+ 288: '"Reserved"', 
+ 289: '"Reserved"', 
+ 290: '"Murata Machinery Ltd."', 
+ 291: '"MTT Company Ltd."', 
+ 292: '"Kanematsu Semiconductor Corp."', 
+ 293: '"Takebishi Electric Sales Co."', 
+ 294: '"Tokyo Electron Device Ltd"', 
+ 295: '"PFU Limited"', 
+ 296: '"Hakko Automation Co., Ltd."', 
+ 297: '"Advanet Inc."', 
+ 298: '"Tokyo Electron Software Technologies Ltd."', 
+ 299: '"Reserved"', 
+ 300: '"Shinagawa Electric Wire Co., Ltd."', 
+ 301: '"Yokogawa M&C Corporation"', 
+ 302: '"KONAN Electric Co., Ltd."', 
+ 303: '"Binar Elektronik AB"', 
+ 304: '"Furukawa Electric Co."', 
+ 305: '"Cooper Energy Services"', 
+ 306: '"Schleicher GmbH & Co."', 
+ 307: '"Hirose Electric Co., Ltd"', 
+ 308: '"Western Servo Design Inc."', 
+ 309: '"Prosoft Technology"', 
+ 310: '"Reserved"', 
+ 311: '"Towa Shoko Co., Ltd"', 
+ 312: '"Kyopal Co., Ltd"', 
+ 313: '"Extron Co."', 
+ 314: '"Wieland Electric GmbH"', 
+ 315: '"SEW Eurodrive GmbH"', 
+ 316: '"Aera Corporation"', 
+ 317: '"STA Reutlingen"', 
+ 318: '"Reserved"', 
+ 319: '"Fuji Electric Co., Ltd."', 
+ 320: '"Reserved"', 
+ 321: '"Reserved"', 
+ 322: '"ifm efector, inc."', 
+ 323: '"Reserved"', 
+ 324: '"IDEACOD-Hohner Automation S.A."', 
+ 325: '"CommScope Inc."', 
+ 326: '"GE Fanuc Automation North America, Inc."', 
+ 327: '"Matsushita Electric Industrial Co., Ltd"', 
+ 328: '"Okaya Electronics Corporation"', 
+ 329: '"KASHIYAMA Industries, Ltd"', 
+ 330: '"JVC"', 
+ 331: '"Interface Corporation"', 
+ 332: '"Grape Systems Inc."', 
+ 333: '"Reserved"', 
+ 334: '"Reserved"', 
+ 335: '"Toshiba IT & Control Systems Corporation"', 
+ 336: '"Sanyo Machine Works, Ltd."', 
+ 337: '"Vansco Electronics Ltd."', 
+ 338: '"Dart Container Corp."', 
+ 339: '"Livingston & Co., Inc."', 
+ 340: '"Alfa Laval LKM as"', 
+ 341: '"BF ENTRON Ltd. (British Federal)"', 
+ 342: '"Bekaert Engineering NV"', 
+ 343: '"Ferran Scientific Inc."', 
+ 344: '"KEBA AG"', 
+ 345: '"Endress + Hauser"', 
+ 346: '"Reserved"', 
+ 347: '"ABB ALSTOM Power UK Ltd. (EGT)"', 
+ 348: '"Berger Lahr GmbH"', 
+ 349: '"Reserved"', 
+ 350: '"Federal Signal Corp."', 
+ 351: '"Kawasaki Robotics (USA), Inc."', 
+ 352: '"Bently Nevada Corporation"', 
+ 353: '"Reserved"', 
+ 354: '"FRABA Posital GmbH"', 
+ 355: '"Elsag Bailey, Inc."', 
+ 356: '"Fanuc Robotics America"', 
+ 357: '"Reserved"', 
+ 358: '"Surface Combustion, Inc."', 
+ 359: '"Reserved"', 
+ 360: '"AILES Electronics Ind. Co., Ltd."', 
+ 361: '"Wonderware Corporation"', 
+ 362: '"Particle Measuring Systems, Inc."', 
+ 363: '"Reserved"', 
+ 364: '"Reserved"', 
+ 365: '"BITS Co., Ltd"', 
+ 366: '"Japan Aviation Electronics Industry Ltd"', 
+ 367: '"Keyence Corporation"', 
+ 368: '"Kuroda Precision Industries Ltd."', 
+ 369: '"Mitsubishi Electric Semiconductor Application"', 
+ 370: '"Nippon Seisen Cable, Ltd."', 
+ 371: '"Omron ASO Co., Ltd"', 
+ 372: '"Seiko Seiki Co., Ltd."', 
+ 373: '"Sumitomo Heavy Industries, Ltd."', 
+ 374: '"Tango Computer Service Corporation"', 
+ 375: '"Technology Service, Inc."', 
+ 376: '"Toshiba Information Systems (Japan) Corporation"', 
+ 377: '"TOSHIBA Schneider Inverter Corporation"', 
+ 378: '"Toyooki Kogyo Co., Ltd."', 
+ 379: '"XEBEC"', 
+ 380: '"Madison Cable Corporation"', 
+ 381: '"Hitati Engineering & Services Co., Ltd"', 
+ 382: '"TEM-TECH Lab Co., Ltd"', 
+ 383: '"International Laboratory Corporation"', 
+ 384: '"Dyadic Systems Co., Ltd."', 
+ 385: '"SETO Electronics Industry Co., Ltd"', 
+ 386: '"Tokyo Electron Kyushu Limited"', 
+ 387: '"KEI System Co., Ltd"', 
+ 388: '"Reserved"', 
+ 389: '"Asahi Engineering Co., Ltd"', 
+ 390: '"Contrex Inc."', 
+ 391: '"Paradigm Controls Ltd."', 
+ 392: '"Reserved"', 
+ 393: '"Ohm Electric Co., Ltd."', 
+ 394: '"RKC Instrument Inc."', 
+ 395: '"Suzuki Motor Corporation"', 
+ 396: '"Custom Servo Motors Inc."', 
+ 397: '"PACE Control Systems"', 
+ 398: '"Reserved"', 
+ 399: '"Reserved"', 
+ 400: '"LINTEC Co., Ltd."', 
+ 401: '"Hitachi Cable Ltd."', 
+ 402: '"BUSWARE Direct"', 
+ 403: '"Eaton Electric B.V. (former Holec Holland N.V.)"', 
+ 404: '"VAT Vakuumventile AG"', 
+ 405: '"Scientific Technologies Incorporated"', 
+ 406: '"Alfa Instrumentos Eletronicos Ltda"', 
+ 407: '"TWK Elektronik GmbH"', 
+ 408: '"ABB Welding Systems AB"', 
+ 409: '"BYSTRONIC Maschinen AG"', 
+ 410: '"Kimura Electric Co., Ltd"', 
+ 411: '"Nissei Plastic Industrial Co., Ltd"', 
+ 412: '"Reserved"', 
+ 413: '"Kistler-Morse Corporation"', 
+ 414: '"Proteous Industries Inc."', 
+ 415: '"IDC Corporation"', 
+ 416: '"Nordson Corporation"', 
+ 417: '"Rapistan Systems"', 
+ 418: '"LP-Elektronik GmbH"', 
+ 419: '"GERBI & FASE S.p.A.(Fase Saldatura)"', 
+ 420: '"Phoenix Digital Corporation"', 
+ 421: '"Z-World Engineering"', 
+ 422: '"Honda R&D Co., Ltd."', 
+ 423: '"Bionics Instrument Co., Ltd."', 
+ 424: '"Teknic, Inc."', 
+ 425: '"R.Stahl, Inc."', 
+ 426: '"Reserved"', 
+ 427: '"Ryco Graphic Manufacturing Inc."', 
+ 428: '"Giddings & Lewis, Inc."', 
+ 429: '"Koganei Corporation"', 
+ 430: '"Reserved"', 
+ 431: '"Nichigoh Communication Electric Wire Co., Ltd."', 
+ 432: '"Reserved"', 
+ 433: '"Fujikura Ltd."', 
+ 434: '"AD Link Technology Inc."', 
+ 435: '"StoneL Corporation"', 
+ 436: '"Computer Optical Products, Inc."', 
+ 437: '"CONOS Inc."', 
+ 438: '"Erhardt + Leimer GmbH"', 
+ 439: '"UNIQUE Co. Ltd"', 
+ 440: '"Roboticsware, Inc."', 
+ 441: '"Nachi Fujikoshi Corporation"', 
+ 442: '"Hengstler GmbH"', 
+ 443: '"Reserved"', 
+ 444: '"SUNNY GIKEN Inc."', 
+ 445: '"Lenze Drive Systems GmbH"', 
+ 446: '"CD Systems B.V."', 
+ 447: '"FMT/Aircraft Gate Support Systems AB"', 
+ 448: '"Axiomatic Technologies Corp"', 
+ 449: '"Embedded System Products, Inc."', 
+ 450: '"Reserved"', 
+ 451: '"Mencom Corporation"', 
+ 452: '"Reserved"', 453: '"Matsushita Welding Systems Co., Ltd."', 
+ 454: '"Dengensha Mfg. Co. Ltd."', 
+ 455: '"Quinn Systems Ltd."', 
+ 456: '"Tellima Technology Ltd"', 
+ 457: '"MDT, Software"', 
+ 458: '"Taiwan Keiso Co., Ltd"', 
+ 459: '"Pinnacle Systems"', 
+ 460: '"Ascom Hasler Mailing Sys"', 
+ 461: '"INSTRUMAR Limited"', 
+ 462: '"Reserved"', 
+ 463: '"Navistar International Transportation Corp"', 
+ 464: '"Huettinger Elektronik GmbH + Co. KG"', 
+ 465: '"OCM Technology Inc."', 
+ 466: '"Professional Supply Inc."', 
+ 468: '"Baumer IVO GmbH & Co. KG"', 
+ 469: '"Worcester Controls Corporation"', 
+ 470: '"Pyramid Technical Consultants, Inc."', 
+ 471: '"Reserved"', 
+ 472: '"Apollo Fire Detectors Limited"', 
+ 473: '"Avtron Manufacturing, Inc."', 
+ 474: '"Reserved"', 
+ 475: '"Tokyo Keiso Co., Ltd."', 
+ 476: '"Daishowa Swiki Co., Ltd."', 
+ 477: '"Kojima Instruments Inc."', 
+ 478: '"Shimadzu Corporation"', 
+ 479: '"Tatsuta Electric Wire & Cable Co., Ltd."', 
+ 480: '"MECS Corporation"', 
+ 481: '"Tahara Electric"', 
+ 482: '"Koyo Electronics"', 
+ 483: '"Clever Devices"', 
+ 484: '"GCD Hardware & Software GmbH"', 
+ 485: '"Reserved"', 
+ 486: '"Miller Electric Mfg Co."', 
+ 487: '"GEA Tuchenhagen GmbH"', 
+ 488: '"Riken Keiki Co., LTD"', 
+ 489: '"Keisokugiken Corporation"', 
+ 490: '"Fuji Machine Mfg. Co., Ltd"', 
+ 491: '"Reserved"', 
+ 492: '"Nidec-Shimpo Corp."', 
+ 493: '"UTEC Corporation"', 
+ 494: '"Sanyo Electric Co. Ltd."', 
+ 495: '"Reserved"', 
+ 496: '"Reserved"', 
+ 497: '"Okano Electric Wire Co. Ltd"', 
+ 498: '"Shimaden Co. Ltd."', 
+ 499: '"Teddington Controls Ltd"', 
+ 500: '"Reserved"', 
+ 501: '"VIPA GmbH"', 
+ 502: '"Warwick Manufacturing Group"', 
+ 503: '"Danaher Controls"', 
+ 504: '"Reserved"', 
+ 505: '"Reserved"', 
+ 506: '"American Science & Engineering"', 
+ 507: '"Accutron Controls International Inc."', 
+ 508: '"Norcott Technologies Ltd"', 
+ 509: '"TB Woods, Inc"', 
+ 510: '"Proportion-Air, Inc."', 
+ 511: '"SICK Stegmann GmbH"', 
+ 512: '"Reserved"', 
+ 513: '"Edwards Signaling"', 
+ 514: '"Sumitomo Metal Industries, Ltd"', 
+ 515: '"Cosmo Instruments Co., Ltd."', 
+ 516: '"Denshosha Co., Ltd."', 
+ 517: '"Kaijo Corp."', 
+ 518: '"Michiproducts Co., Ltd."', 
+ 519: '"Miura Corporation"', 
+ 520: '"TG Information Network Co., Ltd."', 
+ 521: '"Fujikin , Inc."', 
+ 522: '"Estic Corp."', 
+ 523: '"GS Hydraulic Sales"', 
+ 524: '"Reserved"', 
+ 525: '"MTE Limited"', 
+ 526: '"Hyde Park Electronics, Inc."', 
+ 527: '"Pfeiffer Vacuum GmbH"', 
+ 528: '"Cyberlogic Technologies"', 
+ 529: '"OKUMA Corporation FA Systems Division"', 
+ 530: '"Reserved"', 
+ 531: '"Hitachi Kokusai Electric Co., Ltd."', 
+ 532: '"SHINKO TECHNOS Co., Ltd."', 
+ 533: '"Itoh Electric Co., Ltd."', 
+ 534: '"Colorado Flow Tech Inc."', 
+ 535: '"Love Controls Division/Dwyer Inst."', 
+ 536: '"Alstom Drives and Controls"', 
+ 537: '"The Foxboro Company"', 
+ 538: '"Tescom Corporation"', 
+ 539: '"Reserved"', 
+ 540: '"Atlas Copco Controls UK"', 
+ 541: '"Reserved"', 
+ 542: '"Autojet Technologies"', 
+ 543: '"Prima Electronics S.p.A."', 
+ 544: '"PMA GmbH"', 
+ 545: '"Shimafuji Electric Co., Ltd"', 
+ 546: '"Oki Electric Industry Co., Ltd"', 
+ 547: '"Kyushu Matsushita Electric Co., Ltd"', 
+ 548: '"Nihon Electric Wire & Cable Co., Ltd"', 
+ 549: '"Tsuken Electric Ind Co., Ltd"', 
+ 550: '"Tamadic Co."', 
+ 551: '"MAATEL SA"', 
+ 552: '"OKUMA America"', 
+ 554: '"TPC Wire & Cable"', 
+ 555: '"ATI Industrial Automation"', 
+ 557: '"Serra Soldadura, S.A."', 
+ 558: '"Southwest Research Institute"', 
+ 559: '"Cabinplant International"', 
+ 560: '"Sartorius Mechatronics T&H GmbH"', 
+ 561: '"Comau S.p.A. Robotics & Final Assembly Division"', 
+ 562: '"Phoenix Contact"', 
+ 563: '"Yokogawa MAT Corporation"', 
+ 564: '"asahi sangyo co., ltd."', 
+ 565: '"Reserved"', 
+ 566: '"Akita Myotoku Ltd."', 
+ 567: '"OBARA Corp."', 
+ 568: '"Suetron Electronic GmbH"', 
+ 569: '"Reserved"', 
+ 570: '"Serck Controls Limited"', 
+ 571: '"Fairchild Industrial Products Company"', 
+ 572: '"ARO S.A."', 
+ 573: '"M2C GmbH"', 
+ 574: '"Shin Caterpillar Mitsubishi Ltd."', 
+ 575: '"Santest Co., Ltd."', 
+ 576: '"Cosmotechs Co., Ltd."', 
+ 577: '"Hitachi Electric Systems"', 
+ 578: '"Smartscan Ltd"', 
+ 579: '"Woodhead Software & Electronics France"', 
+ 580: '"Athena Controls, Inc."', 
+ 581: '"Syron Engineering & Manufacturing, Inc."', 
+ 582: '"Asahi Optical Co., Ltd."', 
+ 583: '"Sansha Electric Mfg. Co., Ltd."', 
+ 584: '"Nikki Denso Co., Ltd."', 
+ 585: '"Star Micronics, Co., Ltd."', 
+ 586: '"Ecotecnia Socirtat Corp."', 
+ 587: '"AC Technology Corp."', 
+ 588: '"West Instruments Limited"', 
+ 589: '"NTI Limited"', 
+ 590: '"Delta Computer Systems, Inc."', 
+ 591: '"FANUC Ltd."', 
+ 592: '"Hearn-Gu Lee"', 
+ 593: '"ABB Automation Products"', 
+ 594: '"Orion Machinery Co., Ltd."', 
+ 595: '"Reserved"', 
+ 596: '"Wire-Pro, Inc."', 
+ 597: '"Beijing Huakong Technology Co. Ltd."', 
+ 598: '"Yokoyama Shokai Co., Ltd."', 
+ 599: '"Toyogiken Co., Ltd."', 
+ 600: '"Coester Equipamentos Eletronicos Ltda."', 
+ 601: '"Reserved"', 
+ 602: '"Electroplating Engineers of Japan Ltd."', 
+ 603: '"ROBOX S.p.A."', 
+ 604: '"Spraying Systems Company"', 
+ 605: '"Benshaw Inc."', 
+ 606: '"ZPA-DP A.S."', 
+ 607: '"Wired Rite Systems"', 
+ 608: '"Tandis Research, Inc."', 
+ 609: '"SSD Drives GmbH"', 
+ 610: '"ULVAC Japan Ltd."', 
+ 611: '"DYNAX Corporation"', 
+ 612: '"Nor-Cal Products, Inc."', 
+ 613: '"Aros Electronics AB"', 
+ 614: '"Jun-Tech Co., Ltd."', 
+ 615: '"HAN-MI Co. Ltd."', 
+ 616: '"uniNtech (formerly SungGi Internet)"', 
+ 617: '"Hae Pyung Electronics Reserch Institute"', 
+ 618: '"Milwaukee Electronics"', 
+ 619: '"OBERG Industries"', 
+ 620: '"Parker Hannifin/Compumotor Division"', 
+ 621: '"TECHNO DIGITAL CORPORATION"', 
+ 622: '"Network Supply Co., Ltd."', 
+ 623: '"Union Electronics Co., Ltd."', 
+ 624: '"Tritronics Services PM Ltd."', 
+ 625: '"Rockwell Automation-Sprecher+Schuh"', 
+ 626: '"Matsushita Electric Industrial Co., Ltd/Motor Co."', 
+ 627: '"Rolls-Royce Energy Systems, Inc."', 
+ 628: '"JEONGIL INTERCOM CO., LTD"', 
+ 629: '"Interroll Corp."', 
+ 630: '"Hubbell Wiring Device-Kellems (Delaware)"', 
+ 631: '"Intelligent Motion Systems"', 
+ 632: '"Reserved"', 
+ 633: '"INFICON AG"', 
+ 634: '"Hirschmann, Inc."', 
+ 635: '"The Siemon Company"', 
+ 636: '"YAMAHA Motor Co. Ltd."', 
+ 637: '"aska corporation"', 
+ 638: '"Woodhead Connectivity"', 
+ 639: '"Trimble AB"', 
+ 640: '"Murrelektronik GmbH"', 
+ 641: '"Creatrix Labs, Inc."', 
+ 642: '"TopWorx"', 
+ 643: '"Kumho Industrial Co., Ltd."', 
+ 644: '"Wind River Systems, Inc."', 
+ 645: '"Bihl & Wiedemann GmbH"', 
+ 646: '"Harmonic Drive Systems Inc."', 
+ 647: '"Rikei Corporation"', 
+ 648: '"BL Autotec, Ltd."', 
+ 649: '"Hana Information & Technology Co., Ltd."', 
+ 650: '"Seoil Electric Co., Ltd."', 
+ 651: '"Fife Corporation"', 
+ 652: '"Shanghai Electrical Apparatus Research Institute"', 
+ 653: '"Reserved"', 
+ 654: '"Parasense Development Centre"', 
+ 655: '"Reserved"', 
+ 656: '"Reserved"', 
+ 657: '"Six Tau S.p.A."', 
+ 658: '"Aucos GmbH"', 
+ 659: '"Rotork Controls"', 
+ 660: '"Automationdirect.com"', 
+ 661: '"Thermo BLH"', 
+ 662: '"System Controls, Ltd."', 
+ 663: '"Univer S.p.A."', 
+ 664: '"MKS-Tenta Technology"', 
+ 665: '"Lika Electronic SNC"', 
+ 666: '"Mettler-Toledo, Inc."', 
+ 667: '"DXL USA Inc."', 
+ 668: '"Rockwell Automation/Entek IRD Intl."', 
+ 669: '"Nippon Otis Elevator Company"', 
+ 670: '"Sinano Electric, Co., Ltd."', 
+ 671: '"Sony Manufacturing Systems"', 
+ 672: '"Reserved"', 
+ 673: '"Contec Co., Ltd."', 
+ 674: '"Automated Solutions"', 
+ 675: '"Controlweigh"', 
+ 676: '"Reserved"', 
+ 677: '"Fincor Electronics"', 
+ 678: '"Cognex Corporation"', 
+ 679: '"Qualiflow"', 
+ 680: '"Weidmuller, Inc."', 
+ 681: '"Morinaga Milk Industry Co., Ltd."', 
+ 682: '"Takagi Industrial Co., Ltd."', 
+ 683: '"Wittenstein AG"', 
+ 684: '"Sena Technologies, Inc."', 
+ 685: '"Reserved"', 
+ 686: '"APV Products Unna"', 
+ 687: '"Creator Teknisk Utvedkling AB"', 
+ 688: '"Reserved"', 
+ 689: '"Mibu Denki Industrial Co., Ltd."', 
+ 690: '"Takamastsu Machineer Section"', 
+ 691: '"Startco Engineering Ltd."', 
+ 692: '"Reserved"', 
+ 693: '"Holjeron"', 
+ 694: '"ALCATEL High Vacuum Technology"', 
+ 695: '"Taesan LCD Co., Ltd."', 
+ 696: '"POSCON"', 
+ 697: '"VMIC"', 
+ 698: '"Matsushita Electric Works, Ltd."', 
+ 699: '"IAI Corporation"', 
+ 700: '"Horst GmbH"', 
+ 701: '"MicroControl GmbH & Co."', 
+ 702: '"Leine & Linde AB"', 
+ 703: '"Reserved"', 
+ 704: '"EC Elettronica Srl"', 
+ 705: '"VIT Software HB"', 
+ 706: '"Bronkhorst High-Tech B.V."', 
+ 707: '"Optex Co., Ltd."', 
+ 708: '"Yosio Electronic Co."', 
+ 709: '"Terasaki Electric Co., Ltd."', 
+ 710: '"Sodick Co., Ltd."', 
+ 711: '"MTS Systems Corporation-Automation Division"', 
+ 712: '"Mesa Systemtechnik"', 
+ 713: '"SHIN HO SYSTEM Co., Ltd."', 
+ 714: '"Goyo Electronics Co, Ltd."', 
+ 715: '"Loreme"', 
+ 716: '"SAB Brockskes GmbH & Co. KG"', 
+ 717: '"Trumpf Laser GmbH + Co. KG"', 
+ 718: '"Niigata Electronic Instruments Co., Ltd."', 
+ 719: '"Yokogawa Digital Computer Corporation"', 
+ 720: '"O.N. Electronic Co., Ltd."', 
+ 721: '"Industrial Control\\tCommunication, Inc."', 
+ 722: '"ABB, Inc."', 
+ 723: '"ElectroWave USA, Inc."', 
+ 724: '"Industrial Network Controls, LLC"', 
+ 725: '"KDT Systems Co., Ltd."', 
+ 726: '"SEFA Technology Inc."', 
+ 727: '"Nippon POP Rivets and Fasteners Ltd."', 
+ 728: '"Yamato Scale Co., Ltd."', 
+ 729: '"Zener Electric"', 
+ 730: '"GSE Scale Systems"', 
+ 731: '"ISAS (Integrated Switchgear & Sys. Pty Ltd)"', 
+ 732: '"Beta LaserMike Limited"', 
+ 733: '"TOEI Electric Co., Ltd."', 
+ 734: '"Hakko Electronics Co., Ltd"', 
+ 735: '"Reserved"', 
+ 736: '"RFID, Inc."', 
+ 737: '"Adwin Corporation"', 
+ 738: '"Osaka Vacuum, Ltd."', 
+ 739: '"A-Kyung Motion, Inc."', 
+ 740: '"Camozzi S.P. A."', 
+ 741: '"Crevis Co., LTD"', 
+ 742: '"Rice Lake Weighing Systems"', 
+ 743: '"Linux Network Services"', 
+ 744: '"KEB Antriebstechnik GmbH"', 
+ 745: '"Hagiwara Electric Co., Ltd."', 
+ 746: '"Glass Inc. International"', 
+ 747: '"Reserved"', 
+ 748: '"DVT Corporation"', 
+ 749: '"Woodward Governor"', 
+ 750: '"Mosaic Systems, Inc."', 
+ 751: '"Laserline GmbH"', 
+ 752: '"COM-TEC, Inc."', 
+ 753: '"Weed Instrument"', 
+ 754: '"Prof-face European Technology Center"', 
+ 755: '"Fuji Automation Co., Ltd."', 
+ 756: '"Matsutame Co., Ltd."', 
+ 757: '"Hitachi Via Mechanics, Ltd."', 
+ 758: '"Dainippon Screen Mfg. Co. Ltd."', 
+ 759: '"FLS Automation A/S"', 
+ 760: '"ABB Stotz Kontakt GmbH"', 
+ 761: '"Technical Marine Service"', 
+ 762: '"Advanced Automation Associates, Inc."', 
+ 763: '"Baumer Ident GmbH"', 
+ 764: '"Tsubakimoto Chain Co."', 
+ 765: '"Reserved"', 
+ 766: '"Furukawa Co., Ltd."', 
+ 767: '"Active Power"', 
+ 768: '"CSIRO Mining Automation"', 
+ 769: '"Matrix Integrated Systems"', 
+ 770: '"Digitronic Automationsanlagen GmbH"', 
+ 771: '"SICK STEGMANN Inc."', 
+ 772: '"TAE-Antriebstechnik GmbH"', 
+ 773: '"Electronic Solutions"', 
+ 774: '"Rocon L.L.C."', 
+ 775: '"Dijitized Communications Inc."', 
+ 776: '"Asahi Organic Chemicals Industry Co., Ltd."', 
+ 777: '"Hodensha"', 
+ 778: '"Harting, Inc. NA"', 
+ 779: '"Kubler GmbH"', 
+ 780: '"Yamatake Corporation"', 
+ 781: '"JEOL"', 
+ 782: '"Yamatake Industrial Systems Co., Ltd."', 
+ 783: '"HAEHNE Elektronische Messgerate GmbH"', 
+ 784: '"Ci Technologies Pty Ltd (for Pelamos Industries)"', 
+ 785: '"N. SCHLUMBERGER & CIE"', 
+ 786: '"Teijin Seiki Co., Ltd."', 
+ 787: '"DAIKIN Industries, Ltd"', 
+ 788: '"RyuSyo Industrial Co., Ltd."', 
+ 789: '"SAGINOMIYA SEISAKUSHO, INC."', 
+ 790: '"Seishin Engineering Co., Ltd."', 
+ 791: '"Japan Support System Ltd."', 
+ 792: '"Decsys"', 
+ 793: '"Metronix Messgerate u. Elektronik GmbH"', 
+ 794: '"Reserved"', 
+ 795: '"Vaccon Company, Inc."', 
+ 796: '"Siemens Energy & Automation, Inc."', 
+ 797: '"Ten X Technology, Inc."', 
+ 798: '"Tyco Electronics"', 
+ 799: '"Delta Power Electronics Center"', 
+ 800: '"Denker"', 
+ 801: '"Autonics Corporation"', 
+ 802: '"JFE Electronic Engineering Pty. Ltd."', 
+ 803: '"Reserved"', 
+ 804: '"Electro-Sensors, Inc."', 
+ 805: '"Digi International, Inc."', 
+ 806: '"Texas Instruments"', 
+ 807: '"ADTEC Plasma Technology Co., Ltd"', 
+ 808: '"SICK AG"', 
+ 809: '"Ethernet Peripherals, Inc."', 
+ 810: '"Animatics Corporation"', 
+ 811: '"Reserved"', 
+ 812: '"Process Control Corporation"', 
+ 813: '"SystemV. Inc."', 
+ 814: '"Danaher Motion SRL"', 
+ 815: '"SHINKAWA Sensor Technology, Inc."', 
+ 816: '"Tesch GmbH & Co. KG"', 
+ 817: '"Reserved"', 
+ 818: '"Trend Controls Systems Ltd."', 
+ 819: '"Guangzhou ZHIYUAN Electronic Co., Ltd."', 
+ 820: '"Mykrolis Corporation"', 
+ 821: '"Bethlehem Steel Corporation"', 
+ 822: '"KK ICP"', 
+ 823: '"Takemoto Denki Corporation"', 
+ 824: '"The Montalvo Corporation"', 
+ 825: '"Reserved"', 
+ 826: '"LEONI Special Cables GmbH"', 
+ 827: '"Reserved"', 
+ 828: '"ONO SOKKI CO.,LTD."', 
+ 829: '"Rockwell Samsung Automation"', 
+ 830: '"SHINDENGEN ELECTRIC MFG. CO. LTD"', 
+ 831: '"Origin Electric Co. Ltd."', 
+ 832: '"Quest Technical Solutions, Inc."', 
+ 833: '"LS Cable, Ltd."', 
+ 834: '"Enercon-Nord Electronic GmbH"', 
+ 835: '"Northwire Inc."', 
+ 836: '"Engel Elektroantriebe GmbH"', 
+ 837: '"The Stanley Works"', 
+ 838: '"Celesco Transducer Products, Inc."', 
+ 839: '"Chugoku Electric Wire and Cable Co."', 
+ 840: '"Kongsberg Simrad AS"', 
+ 841: '"Panduit Corporation"', 
+ 842: '"Spellman High Voltage Electronics Corp."', 
+ 843: '"Kokusai Electric Alpha Co., Ltd."', 
+ 844: '"Brooks Automation, Inc."', 
+ 845: '"ANYWIRE CORPORATION"', 
+ 846: '"Honda Electronics Co. Ltd"', 
+ 847: '"REO Elektronik AG"', 
+ 848: '"Fusion UV Systems, Inc."', 
+ 849: '"ASI Advanced Semiconductor Instruments GmbH"', 
+ 850: '"Datalogic, Inc."', 
+ 851: '"SoftPLC Corporation"', 
+ 852: '"Dynisco Instruments LLC"', 
+ 853: '"WEG Industrias SA"', 
+ 854: '"Frontline Test Equipment, Inc."', 
+ 855: '"Tamagawa Seiki Co., Ltd."', 
+ 856: '"Multi Computing Co., Ltd."', 
+ 857: '"RVSI"', 
+ 858: '"Commercial Timesharing Inc."', 
+ 859: '"Tennessee Rand Automation LLC"', 
+ 860: '"Wacogiken Co., Ltd"', 
+ 861: '"Reflex Integration Inc."', 
+ 862: '"Siemens AG, A&D PI Flow Instruments"', 
+ 863: '"G. Bachmann Electronic GmbH"', 
+ 864: '"NT International"', 
+ 865: '"Schweitzer Engineering Laboratories"', 
+ 866: '"ATR Industrie-Elektronik GmbH Co."', 
+ 867: '"PLASMATECH Co., Ltd"', 
+ 868: '"Reserved"', 
+ 869: '"GEMU GmbH & Co. KG"', 
+ 870: '"Alcorn McBride Inc."', 
+ 871: '"MORI SEIKI CO., LTD"', 
+ 872: '"NodeTech Systems Ltd"', 
+ 873: '"Emhart Teknologies"', 
+ 874: '"Cervis, Inc."', 
+ 875: '"FieldServer Technologies (Div Sierra Monitor Corp)"', 
+ 876: '"NEDAP Power Supplies"', 
+ 877: '"Nippon Sanso Corporation"', 
+ 878: '"Mitomi Giken Co., Ltd."', 
+ 879: '"PULS GmbH"', 
+ 880: '"Reserved"', 
+ 881: '"Japan Control Engineering Ltd"', 
+ 882: '"Embedded Systems Korea (Former Zues Emtek Co Ltd.)"', 
+ 883: '"Automa SRL"', 
+ 884: '"Harms+Wende GmbH & Co KG"', 
+ 885: '"SAE-STAHL GmbH"', 
+ 886: '"Microwave Data Systems"', 
+ 887: '"Bernecker + Rainer Industrie-Elektronik GmbH"', 
+ 888: '"Hiprom Technologies"', 
+ 889: '"Reserved"', 
+ 890: '"Nitta Corporation"', 
+ 891: '"Kontron Modular Computers GmbH"', 
+ 892: '"Marlin Controls"', 
+ 893: '"ELCIS s.r.l."', 
+ 894: '"Acromag, Inc."', 
+ 895: '"Avery Weigh-Tronix"', 
+ 896: '"Reserved"', 
+ 897: '"Reserved"', 
+ 898: '"Reserved"', 
+ 899: '"Practicon Ltd"', 
+ 900: '"Schunk GmbH & Co. KG"', 
+ 901: '"MYNAH Technologies"', 
+ 902: '"Defontaine Groupe"', 
+ 903: '"Emerson Process Management Power & Water Solutions"', 
+ 904: '"F.A. Elec"', 
+ 905: '"Hottinger Baldwin Messtechnik GmbH"', 
+ 906: '"Coreco Imaging, Inc."', 
+ 907: '"London Electronics Ltd."', 
+ 908: '"HSD SpA"', 
+ 909: '"Comtrol Corporation"', 
+ 910: '"TEAM, S.A. (Tecnica Electronica de Automatismo Y Medida)"', 
+ 911: '"MAN B&W Diesel Ltd. Regulateurs Europa"', 
+ 912: '"Reserved"', 
+ 913: '"Reserved"', 
+ 914: '"Micro Motion, Inc."', 
+ 915: '"Eckelmann AG"', 
+ 916: '"Hanyoung Nux"', 
+ 917: '"Ransburg Industrial Finishing KK"', 
+ 918: '"Kun Hung Electric Co. Ltd."', 
+ 919: '"Brimos wegbebakening b.v."', 
+ 920: '"Nitto Seiki Co., Ltd"', 
+ 921: '"PPT Vision, Inc."', 
+ 922: '"Yamazaki Machinery Works"', 
+ 923: '"SCHMIDT Technology GmbH"', 
+ 924: '"Parker Hannifin SpA (SBC Division)"', 
+ 925: '"HIMA Paul Hildebrandt GmbH"', 
+ 926: '"RivaTek, Inc."', 
+ 927: '"Misumi Corporation"', 
+ 928: '"GE Multilin"', 
+ 929: '"Measurement Computing Corporation"', 
+ 930: '"Jetter AG"', 
+ 931: '"Tokyo Electronics Systems Corporation"', 
+ 932: '"Togami Electric Mfg. Co., Ltd."', 
+ 933: '"HK Systems"', 
+ 934: '"CDA Systems Ltd."', 
+ 935: '"Aerotech Inc."', 
+ 936: '"JVL Industrie Elektronik A/S"', 
+ 937: '"NovaTech Process Solutions LLC"', 
+ 938: '"Reserved"', 
+ 939: '"Cisco Systems"', 
+ 940: '"Grid Connect"', 
+ 941: '"ITW Automotive Finishing"', 
+ 942: '"HanYang System"', 
+ 943: '"ABB K.K. Technical Center"', 
+ 944: '"Taiyo Electric Wire & Cable Co., Ltd."', 
+ 945: '"Reserved"', 
+ 946: '"SEREN IPS INC"', 
+ 947: '"Belden CDT Electronics Division"', 
+ 948: '"ControlNet International"', 
+ 949: '"Gefran S.P.A."', 
+ 950: '"Jokab Safety AB"', 
+ 951: '"SUMITA OPTICAL GLASS, INC."', 
+ 952: '"Biffi Italia srl"', 
+ 953: '"Beck IPC GmbH"', 
+ 954: '"Copley Controls Corporation"', 
+ 955: '"Fagor Automation S. Coop."', 
+ 956: '"DARCOM"', 
+ 957: '"Frick Controls (div. of York International)"', 
+ 958: '"SymCom, Inc."', 
+ 959: '"Infranor"', 
+ 960: '"Kyosan Cable, Ltd."', 
+ 961: '"Varian Vacuum Technologies"', 
+ 962: '"Messung Systems"', 
+ 963: '"Xantrex Technology, Inc."', 
+ 964: '"StarThis Inc."', 
+ 965: '"Chiyoda Co., Ltd."', 
+ 966: '"Flowserve Corporation"', 
+ 967: '"Spyder Controls Corp."', 
+ 968: '"IBA AG"', 
+ 969: '"SHIMOHIRA ELECTRIC MFG.CO.,LTD"', 
+ 970: '"Reserved"', 
+ 971: '"Siemens L&A"', 
+ 972: '"Micro Innovations AG"', 
+ 973: '"Switchgear & Instrumentation"', 
+ 974: '"PRE-TECH CO., LTD."', 
+ 975: '"National Semiconductor"', 
+ 976: '"Invensys Process Systems"', 
+ 977: '"Ametek HDR Power Systems"', 
+ 978: '"Reserved"', 
+ 979: '"TETRA-K Corporation"', 
+ 980: '"C & M Corporation"', 
+ 981: '"Siempelkamp Maschinen"', 
+ 982: '"Reserved"', 
+ 983: '"Daifuku America Corporation"', 
+ 984: '"Electro-Matic Products Inc."', 
+ 985: '"BUSSAN MICROELECTRONICS CORP."', 
+ 986: '"ELAU AG"', 
+ 987: '"Hetronic USA"', 
+ 988: '"NIIGATA POWER SYSTEMS Co., Ltd."', 
+ 989: '"Software Horizons Inc."', 
+ 990: '"B3 Systems, Inc."', 
+ 991: '"Moxa Networking Co., Ltd."', 
+ 992: '"Reserved"', 
+ 993: '"S4 Integration"', 
+ 994: '"Elettro Stemi S.R.L."', 
+ 995: '"AquaSensors"', 
+ 996: '"Ifak System GmbH"', 
+ 997: '"SANKEI MANUFACTURING Co.,LTD."', 
+ 998: '"Emerson Network Power Co., Ltd."', 
+ 999: '"Fairmount Automation, Inc."', 
+ 1000: '"Bird Electronic Corporation"', 
+ 1001: '"Nabtesco Corporation"', 
+ 1002: '"AGM Electronics, Inc."', 
+ 1003: '"ARCX Inc."', 
+ 1004: '"DELTA I/O Co."', 
+ 1005: '"Chun IL Electric Ind. Co."', 
+ 1006: '"N-Tron"', 
+ 1007: '"Nippon Pneumatics/Fludics System CO.,LTD."', 
+ 1008: '"DDK Ltd."', 
+ 1009: '"Seiko Epson Corporation"', 
+ 1010: '"Halstrup-Walcher GmbH"', 
+ 1011: '"ITT"', 
+ 1012: '"Ground Fault Systems bv"', 
+ 1013: '"Scolari Engineering S.p.A."', 
+ 1014: '"Vialis Traffic bv"', 
+ 1015: '"Weidmueller Interface GmbH & Co. KG"', 
+ 1016: '"Shanghai Sibotech Automation Co. Ltd"', 
+ 1017: '"AEG Power Supply Systems GmbH"', 
+ 1018: '"Komatsu Electronics Inc."', 
+ 1019: '"Souriau"', 
+ 1020: '"Baumuller Chicago Corp."', 
+ 1021: '"J. Schmalz GmbH"', 
+ 1022: '"SEN Corporation"', 
+ 1023: '"Korenix Technology Co. Ltd"', 
+ 1024: '"Cooper Power Tools"', 
+ 1025: '"INNOBIS"', 
+ 1026: '"Shinho System"', 
+ 1027: '"Xm Services Ltd."', 
+ 1028: '"KVC Co., Ltd."', 
+ 1029: '"Sanyu Seiki Co., Ltd."', 
+ 1030: '"TuxPLC"', 
+ 1031: '"Northern Network Solutions"', 
+ 1032: '"Converteam GmbH"', 
+ 1033: '"Symbol Technologies"', 
+ 1034: '"S-TEAM Lab"', 
+ 1035: '"Maguire Products, Inc."', 
+ 1036: '"AC&T"', 
+ 1037: '"MITSUBISHI HEAVY INDUSTRIES, LTD. KOBE SHIPYARD & MACHINERY WORKS"', 
+ 1038: '"Hurletron Inc."', 
+ 1039: '"Chunichi Denshi Co., Ltd"', 
+ 1040: '"Cardinal Scale Mfg. Co."', 
+ 1041: '"BTR NETCOM via RIA Connect, Inc."', 
+ 1042: '"Base2"', 
+ 1043: '"ASRC Aerospace"', 
+ 1044: '"Beijing Stone Automation"', 
+ 1045: '"Changshu Switchgear Manufacture Ltd."', 
+ 1046: '"METRONIX Corp."', 
+ 1047: '"WIT"', 
+ 1048: '"ORMEC Systems Corp."', 
+ 1049: '"ASATech (China) Inc."', 
+ 1050: '"Controlled Systems Limited"', 
+ 1051: '"Mitsubishi Heavy Ind. Digital System Co., Ltd. (M.H.I.)"', 
+ 1052: '"Electrogrip"', 
+ 1053: '"TDS Automation"', 
+ 1054: '"T&C Power Conversion, Inc."', 
+ 1055: '"Robostar Co., Ltd"', 
+ 1056: '"Scancon A/S"', 
+ 1057: '"Haas Automation, Inc."', 
+ 1058: '"Eshed Technology"', 
+ 1059: '"Delta Electronic Inc."', 
+ 1060: '"Innovasic Semiconductor"', 
+ 1061: '"SoftDEL Systems Limited"', 
+ 1062: '"FiberFin, Inc."', 
+ 1063: '"Nicollet Technologies Corp."', 
+ 1064: '"B.F. Systems"', 
+ 1065: '"Empire Wire and Supply LLC"', 
+ 1066: '"Reserved"', 
+ 1067: '"Elmo Motion Control LTD"', 
+ 1068: '"Reserved"', 
+ 1069: '"Asahi Keiki Co., Ltd."', 
+ 1070: '"Joy Mining Machinery"', 
+ 1071: '"MPM Engineering Ltd"', 
+ 1072: '"Wolke Inks & Printers GmbH"', 
+ 1073: '"Mitsubishi Electric Engineering Co., Ltd."', 
+ 1074: '"COMET AG"', 
+ 1075: '"Real Time Objects & Systems, LLC"', 
+ 1076: '"MISCO Refractometer"', 
+ 1077: '"JT Engineering Inc."', 
+ 1078: '"Automated Packing Systems"', 
+ 1079: '"Niobrara R&D Corp."', 
+ 1080: '"Garmin Ltd."', 
+ 1081: '"Japan Mobile Platform Co., Ltd"', 
+ 1082: '"Advosol Inc."', 
+ 1083: '"ABB Global Services Limited"', 
+ 1084: '"Sciemetric Instruments Inc."', 
+ 1085: '"Tata Elxsi Ltd."', 
+ 1086: '"TPC Mechatronics, Co., Ltd."', 
+ 1087: '"Cooper Bussmann"', 
+ 1088: '"Trinite Automatisering B.V."', 
+ 1089: '"Peek Traffic B.V."', 
+ 1090: '"Acrison, Inc"', 
+ 1091: '"Applied Robotics, Inc."', 
+ 1092: '"FireBus Systems, Inc."', 
+ 1093: '"Beijing Sevenstar Huachuang Electronics"', 
+ 1094: '"Magnetek"', 
+ 1095: '"Microscan"', 
+ 1096: '"Air Water Inc."', 
+ 1097: '"Sensopart Industriesensorik GmbH"', 
+ 1098: '"Tiefenbach Control Systems GmbH"', 
+ 1099: '"INOXPA S.A"', 
+ 1100: '"Zurich University of Applied Sciences"', 
+ 1101: '"Ethernet Direct"', 
+ 1102: '"GSI-Micro-E Systems"', 
+ 1103: '"S-Net Automation Co., Ltd."', 
+ 1104: '"Power Electronics S.L."', 
+ 1105: '"Renesas Technology Corp."', 
+ 1106: '"NSWCCD-SSES"', 
+ 1107: '"Porter Engineering Ltd."', 
+ 1108: '"Meggitt Airdynamics, Inc."', 
+ 1109: '"Inductive Automation"', 
+ 1110: '"Neural ID"', 
+ 1111: '"EEPod LLC"', 
+ 1112: '"Hitachi Industrial Equipment Systems Co., Ltd."', 
+ 1113: '"Salem Automation"', 
+ 1114: '"port GmbH"', 
+ 1115: '"B & PLUS"', 
+ 1116: '"Graco Inc."', 
+ 1117: '"Altera Corporation"', 
+ 1118: '"Technology Brewing Corporation"', 
+ 1121: '"CSE Servelec"', 
+ 1124: '"Fluke Networks"', 
+ 1125: '"Tetra Pak Packaging Solutions SPA"', 
+ 1126: '"Racine Federated, Inc."', 
+ 1127: '"Pureron Japan Co., Ltd."', 
+ 1130: '"Brother Industries, Ltd."', 
+ 1132: '"Leroy Automation"', 
+ 1137: '"TR-Electronic GmbH"', 
+ 1138: '"ASCON S.p.A."', 
+ 1139: '"Toledo do Brasil Industria de Balancas Ltda."', 
+ 1140: '"Bucyrus DBT Europe GmbH"', 
+ 1141: '"Emerson Process Management Valve Automation"', 
+ 1142: '"Alstom Transport"', 
+ 1144: '"Matrox Electronic Systems"', 
+ 1145: '"Littelfuse"', 
+ 1146: '"PLASMART, Inc."', 
+ 1147: '"Miyachi Corporation"', 
+ 1150: '"Promess Incorporated"', 
+ 1151: '"COPA-DATA GmbH"', 
+ 1152: '"Precision Engine Controls Corporation"', 
+ 1153: '"Alga Automacao e controle LTDA"', 
+ 1154: '"U.I. Lapp GmbH"', 
+ 1155: '"ICES"', 
+ 1156: '"Philips Lighting bv"', 
+ 1157: '"Aseptomag AG"', 
+ 1158: '"ARC Informatique"', 
+ 1159: '"Hesmor GmbH"', 
+ 1160: '"Kobe Steel, Ltd."', 
+ 1161: '"FLIR Systems"', 
+ 1162: '"Simcon A/S"', 
+ 1163: '"COPALP"', 
+ 1164: '"Zypcom, Inc."', 
+ 1165: '"Swagelok"', 
+ 1166: '"Elspec"', 
+ 1167: '"ITT Water & Wastewater AB"', 
+ 1168: '"Kunbus GmbH Industrial Communication"', 
+ 1170: '"Performance Controls, Inc."', 
+ 1171: '"ACS Motion Control, Ltd."', 
+ 1173: '"IStar Technology Limited"', 
+ 1174: '"Alicat Scientific, Inc."', 
+ 1176: '"ADFweb.com SRL"', 
+ 1177: '"Tata Consultancy Services Limited"', 
+ 1178: '"CXR Ltd."', 
+ 1179: '"Vishay Nobel AB"', 
+ 1181: '"SolaHD"', 
+ 1182: '"Endress+Hauser"', 
+ 1183: '"Bartec GmbH"', 
+ 1185: '"AccuSentry, Inc."', 
+ 1186: '"Exlar Corporation"', 
+ 1187: '"ILS Technology"', 
+ 1188: '"Control Concepts Inc."', 
+ 1190: '"Procon Engineering Limited"', 
+ 1191: '"Hermary Opto Electronics Inc."', 
+ 1192: '"Q-Lambda"', 
+ 1194: '"VAMP Ltd"', 
+ 1195: '"FlexLink"', 
+ 1196: '"Office FA.com Co., Ltd."', 
+ 1197: '"SPMC (Changzhou) Co. Ltd."', 
+ 1198: '"Anton Paar GmbH"', 
+ 1199: '"Zhuzhou CSR Times Electric Co., Ltd."', 
+ 1200: '"DeStaCo"', 
+ 1201: '"Synrad, Inc"', 
+ 1202: '"Bonfiglioli Vectron GmbH"', 
+ 1203: '"Pivotal Systems"', 
+ 1204: '"TKSCT"', 
+ 1205: '"Randy Nuernberger"', 
+ 1206: '"CENTRALP"', 
+ 1207: '"Tengen Group"', 
+ 1208: '"OES, Inc."', 
+ 1209: '"Actel Corporation"', 
+ 1210: '"Monaghan Engineering, Inc."', 
+ 1211: '"wenglor sensoric gmbh"', 
+ 1212: '"HSA Systems"', 
+ 1213: '"MK Precision Co., Ltd."', 
+ 1214: '"Tappan Wire and Cable"', 
+ 1215: '"Heinzmann GmbH & Co. KG"', 
+ 1216: '"Process Automation International Ltd."', 
+ 1217: '"Secure Crossing"', 
+ 1218: '"SMA Railway Technology GmbH"', 
+ 1219: '"FMS Force Measuring Systems AG"', 
+ 1220: '"ABT Endustri Enerji Sistemleri Sanayi Tic. Ltd. Sti."', 
+ 1221: '"MagneMotion Inc."', 
+ 1222: '"STS Co., Ltd."', 
+ 1223: '"MERAK SIC, SA"', 
+ 1224: '"ABOUNDI, Inc."', 
+ 1225: '"Rosemount Inc."', 
+ 1226: '"GEA FES, Inc."', 
+ 1227: '"TMG Technologie und Engineering GmbH"', 
+ 1228: '"embeX GmbH"', 
+ 1229: '"GH Electrotermia, S.A."', 
+ 1230: '"Tolomatic"', 
+ 1231: '"Dukane"', 
+ 1232: '"Elco (Tian Jin) Electronics Co., Ltd."', 
+ 1233: '"Jacobs Automation"', 
+ 1234: '"Noda Radio Frequency Technologies Co., Ltd."', 
+ 1235: '"MSC Tuttlingen GmbH"', 
+ 1236: '"Hitachi Cable Manchester"', 
+ 1237: '"ACOREL SAS"', 
+ 1238: '"Global Engineering Solutions Co., Ltd."', 
+ 1239: '"ALTE Transportation, S.L."', 
+ 1240: '"Penko Engineering B.V."'}
diff --git a/APPS_UNCOMPILED/lib/pycomm3/exceptions.py b/APPS_UNCOMPILED/lib/pycomm3/exceptions.py
new file mode 100644
index 0000000..3359a07
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/pycomm3/exceptions.py
@@ -0,0 +1,23 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pycomm3/exceptions.py
+# Compiled at: 2024-04-18 03:12:57
+# Size of source mod 2**32: 1666 bytes
+
+
class PycommError(Exception):
    """
    Base exception for all exceptions raised by pycomm3
    """
+
+
class CommError(PycommError):
    """
    For exceptions raised during connection related issues
    """
+
+
class DataError(PycommError):
    """
    For exceptions raised during handling for responses to requests
    """
+
+
class RequestError(PycommError):
    """
    For exceptions raised due to issues building requests or processing of user supplied data
    """
diff --git a/APPS_UNCOMPILED/lib/pycomm3/map.py b/APPS_UNCOMPILED/lib/pycomm3/map.py
new file mode 100644
index 0000000..16c5742
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/pycomm3/map.py
@@ -0,0 +1,34 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pycomm3/map.py
+# Compiled at: 2024-04-18 03:12:57
+# Size of source mod 2**32: 3457 bytes
+__all__ = [
+ "EnumMap"]
+
class MapMeta(type):
    """Metaclass giving a class case-insensitive, bidirectional dict-like lookups.

    Public class attributes are collected into ``_members_``, keyed both by
    lowercased attribute name (forward) and by value (reverse), so that
    ``Cls['name']`` and ``Cls[value]`` both resolve.

    NOTE(review): the original ``__new__`` was lost to a decompiler failure
    ("Parse error at or near LOAD_DICTCOMP"); the implementation below is
    reconstructed from the contract implied by ``__getitem__``/``get``/
    ``__contains__`` and the EnumMap docstring — verify against the upstream
    pycomm3 source before relying on edge-case behavior.
    """

    def __new__(mcs, name, bases, classdict):
        new_cls = super().__new__(mcs, name, bases, classdict)
        # public (non-underscore) class-body attributes become the members
        attrs = {key: val for key, val in classdict.items() if not key.startswith("_")}
        # forward lookup: lowercase attribute name -> value
        members = {key.lower(): val for key, val in attrs.items()}
        # reverse lookup: value -> attribute name; string values are stored
        # lowercased because the lookup methods lowercase str items first
        for key, val in attrs.items():
            rev_key = val.lower() if isinstance(val, str) else val
            try:
                members.setdefault(rev_key, key)
            except TypeError:
                pass  # unhashable values cannot serve as reverse-lookup keys
        new_cls._members_ = members
        if not hasattr(new_cls, "_return_caps_only_"):
            # when True, string results are upper-cased before being returned
            new_cls._return_caps_only_ = False
        return new_cls

    def __getitem__(self, item):
        """Case-insensitive lookup; raises KeyError when *item* is unknown."""
        val = self._members_.__getitem__(item.lower() if isinstance(item, str) else item)
        if self._return_caps_only_:
            if isinstance(val, str):
                val = val.upper()
        return val

    def get(cls, item, default=None):
        """Case-insensitive lookup returning *default* when *item* is unknown."""
        val = cls._members_.get(item.lower() if isinstance(item, str) else item, default)
        if cls._return_caps_only_:
            if isinstance(val, str):
                val = val.upper()
        return val

    def __contains__(self, item):
        return self._members_.__contains__(item.lower() if isinstance(item, str) else item)
+
+
class EnumMap(metaclass=MapMeta):
    """
    A simple enum-like class that allows dict-like __getitem__() and get() lookups.
    __getitem__() and get() are case-insensitive and bidirectional

    example:

    class TestEnum(Pycomm3EnumMap):
        x = 100

    >>> TestEnum.x
    100
    >>> TestEnum['X']
    100
    >>> TestEnum[100]
    x

    Note: this class is really only to be used internally, it doesn't cover anything more than simple subclasses
    (as in attributes only, don't add methods except for classmethods)
    It's really just to provide dict-like item access with enum-like attributes.

    """
\ No newline at end of file
diff --git a/APPS_UNCOMPILED/lib/pycomm3/packets/__init__.py b/APPS_UNCOMPILED/lib/pycomm3/packets/__init__.py
new file mode 100644
index 0000000..62d1ad5
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/pycomm3/packets/__init__.py
@@ -0,0 +1,31 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pycomm3/packets/__init__.py
+# Compiled at: 2024-04-18 03:12:57
+# Size of source mod 2**32: 3185 bytes
+import logging
+from typing import List, Tuple, Optional, Union
# A response data layout: a list of (name, type) pairs where name may be None
# and type is a str identifier or an int (presumably a byte size — verify)
DataFormatType = List[Tuple[(Optional[str], Union[(str, int)])]]
+
class Packet:
    """Common base for request/response packets; carries a class-scoped logger."""

    # name-mangles to _Packet__log, matching the decompiled original's explicit form
    __log = logging.getLogger(__qualname__)
+
+
+from .responses import ResponsePacket, SendUnitDataResponsePacket, SendRRDataResponsePacket, ListIdentityResponsePacket, RegisterSessionResponsePacket, UnRegisterSessionResponsePacket, ReadTagServiceResponsePacket, MultiServiceResponsePacket, ReadTagFragmentedServiceResponsePacket, GenericConnectedResponsePacket, WriteTagServiceResponsePacket, WriteTagFragmentedServiceResponsePacket, GenericUnconnectedResponsePacket, get_extended_status, get_service_status
+from .requests import RequestPacket, SendUnitDataRequestPacket, SendRRDataRequestPacket, ListIdentityRequestPacket, RegisterSessionRequestPacket, UnRegisterSessionRequestPacket, ReadTagServiceRequestPacket, MultiServiceRequestPacket, ReadTagFragmentedServiceRequestPacket, WriteTagServiceRequestPacket, WriteTagFragmentedServiceRequestPacket, GenericConnectedRequestPacket, GenericUnconnectedRequestPacket, request_path
+from collections import defaultdict
# Dispatch table: command/service name -> request packet class that builds it.
# Unknown keys fall back to the bare RequestPacket via defaultdict.
REQUEST_MAP = defaultdict(RequestPacket, {
 'send_unit_data': SendUnitDataRequestPacket, 
 'send_rr_data': SendRRDataRequestPacket, 
 'register_session': RegisterSessionRequestPacket, 
 'unregister_session': UnRegisterSessionRequestPacket, 
 'list_identity': ListIdentityRequestPacket, 
 'read_tag': ReadTagServiceRequestPacket, 
 'multi_request': MultiServiceRequestPacket, 
 'read_tag_fragmented': ReadTagFragmentedServiceRequestPacket, 
 'write_tag': WriteTagServiceRequestPacket, 
 'write_tag_fragmented': WriteTagFragmentedServiceRequestPacket, 
 'generic_connected': GenericConnectedRequestPacket, 
 'generic_unconnected': GenericUnconnectedRequestPacket})
diff --git a/APPS_UNCOMPILED/lib/pycomm3/packets/requests.py b/APPS_UNCOMPILED/lib/pycomm3/packets/requests.py
new file mode 100644
index 0000000..57a0d3a
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/pycomm3/packets/requests.py
@@ -0,0 +1,661 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pycomm3/packets/requests.py
+# Compiled at: 2024-04-18 03:12:57
+# Size of source mod 2**32: 26992 bytes
+import logging
+from typing import Union
+from reprlib import repr as _r
+from . import Packet, DataFormatType
+from . import ResponsePacket, SendUnitDataResponsePacket, ReadTagServiceResponsePacket, RegisterSessionResponsePacket, UnRegisterSessionResponsePacket, ListIdentityResponsePacket, SendRRDataResponsePacket, MultiServiceResponsePacket, ReadTagFragmentedServiceResponsePacket, WriteTagServiceResponsePacket, WriteTagFragmentedServiceResponsePacket, GenericUnconnectedResponsePacket, GenericConnectedResponsePacket
+from ..exceptions import CommError, RequestError
+from ..bytes_ import Pack, print_bytes_msg
+from ..const import EncapsulationCommand, INSUFFICIENT_PACKETS, DataItem, AddressItem, EXTENDED_SYMBOL, ELEMENT_TYPE, TagService, CLASS_TYPE, INSTANCE_TYPE, DataType, DataTypeSize, ConnectionManagerService, ClassCode, CommonService, STRUCTURE_READ_REPLY, PRIORITY, TIMEOUT_TICKS, ATTRIBUTE_TYPE
+
class RequestPacket(Packet):
    """Base class for outgoing EtherNet/IP encapsulation requests.

    Subclasses configure the class-level CPF item types, encapsulation
    command and response class; ``add()`` accumulates the payload and
    ``send()`` performs the request/reply round trip against the owning
    ``plc`` driver object.

    Cleanup vs. the decompiled original: the explicit
    ``try: raise ... from err / finally: err = None; del err`` blocks are
    uncompyle6's literal rendering of CPython's implicit ``except ... as err``
    cleanup — the plain ``raise ... from err`` below is behavior-identical.
    """

    __log = logging.getLogger(f"{__module__}.{__qualname__}")
    _message_type = None     # CPF data item type (set by subclass)
    _address_type = None     # CPF address item type (set by subclass)
    _timeout = b'\n\x00'     # timeout field: 10, as little-endian uint
    _encap_command = None    # encapsulation command code (set by subclass)
    _response_class = ResponsePacket
    _response_args = ()
    _response_kwargs = {}
    type_ = None
    VERBOSE_DEBUG = False

    def __init__(self, plc):
        super().__init__()
        self._msg = []     # accumulated payload fragments (bytes)
        self._plc = plc    # driver object providing socket, session and config
        self.error = None  # set to an error string to short-circuit send()

    def add(self, *value: bytes):
        """Append raw byte fragments to the payload; returns self for chaining."""
        self._msg.extend(value)
        return self

    @property
    def message(self) -> bytes:
        """The full request payload as a single bytes object."""
        return b''.join(self._msg)

    def _build_request(self):
        # common packet format body wrapped in the fixed encapsulation header
        msg = self._build_common_packet_format(addr_data=self._plc._target_cid)
        header = self._build_header(self._encap_command, len(msg))
        return header + msg

    def _build_header(self, command, length) -> bytes:
        """Build the encapsulated message header.

        The header is 24 bytes fixed length, and includes the command and the
        length of the optional data portion.

        :return: the header
        :raises CommError: if any field cannot be packed
        """
        try:
            return b''.join([
                command,
                Pack.uint(length),
                Pack.udint(self._plc._session),
                b'\x00\x00\x00\x00',                  # zeroed field (status on requests)
                self._plc._cfg["context"],
                Pack.udint(self._plc._cfg["option"])])
        except Exception as err:
            raise CommError("Failed to build request header") from err

    def _build_common_packet_format(self, addr_data=None) -> bytes:
        """Wrap the payload in the common packet format (CPF) structure."""
        addr_data = b'\x00\x00' if addr_data is None else Pack.uint(len(addr_data)) + addr_data
        msg = self.message
        return b''.join([
            b'\x00\x00\x00\x00',   # zeroed interface-handle field
            self._timeout,
            b'\x02\x00',           # item count: 2 (address item + data item)
            self._address_type,
            addr_data,
            self._message_type,
            Pack.uint(len(msg)),
            msg])

    def _send(self, message):
        """Send *message* on the driver socket.

        :raises CommError: if the socket send fails
        """
        try:
            if self.VERBOSE_DEBUG:
                self.__log.debug(print_bytes_msg(message, ">>> SEND >>>"))
            self._plc._sock.send(message)
        except Exception as err:
            raise CommError("failed to send message") from err

    def _receive(self):
        """Receive the reply from the driver socket.

        :return: reply data
        :raises CommError: if the socket receive fails
        """
        try:
            reply = self._plc._sock.receive()
        except Exception as err:
            raise CommError("failed to receive reply") from err
        if self.VERBOSE_DEBUG:
            self.__log.debug(print_bytes_msg(reply, "<<< RECEIVE <<<"))
        return reply

    def send(self) -> ResponsePacket:
        """Perform the request/reply round trip and return the parsed response.

        If ``self.error`` is already set, nothing is sent; an error response
        carrying that message is fabricated instead.
        """
        if not self.error:
            self._send(self._build_request())
            self.__log.debug(f"Sent: {self!r}")
            reply = self._receive()
            response = self._response_class(reply, *self._response_args, **self._response_kwargs)
        else:
            response = self._response_class(*self._response_args, **self._response_kwargs)
            response._error = self.error
        self.__log.debug(f"Received: {response!r}")
        return response

    def __repr__(self):
        return f"{self.__class__.__name__}(message={_r(self._msg)})"

    __str__ = __repr__
+
+
class SendUnitDataRequestPacket(RequestPacket):
    """Request sent over an established (connected) connection via send_unit_data."""

    __log = logging.getLogger(f"{__module__}.{__qualname__}")
    _message_type = DataItem.connected
    _address_type = AddressItem.connection
    _response_class = SendUnitDataResponsePacket
    _encap_command = EncapsulationCommand.send_unit_data

    def __init__(self, plc):
        super().__init__(plc)
        # connected messages start with the driver's connection sequence number
        self._msg = [Pack.uint(plc._sequence)]
+
+
class ReadTagServiceRequestPacket(SendUnitDataRequestPacket):
    # Connected "Read Tag" service for a single tag.
    _ReadTagServiceRequestPacket__log = logging.getLogger(f"{__module__}.{__qualname__}")
    type_ = "read"
    _response_class = ReadTagServiceResponsePacket

    def __init__(self, plc):
        super().__init__(plc)
        self.tag = None        # tag name, set by add()
        self.elements = None   # number of elements to read
        self.tag_info = None   # cached tag metadata, used by the response to parse values

    def add(self, tag, elements=1, tag_info=None):
        """Stage a read of ``elements`` elements of ``tag``; records an error if the request path cannot be built."""
        self.tag = tag
        self.elements = elements
        self.tag_info = tag_info
        request_path = _create_tag_rp(self.tag, self._plc.tags, self._plc.use_instance_ids)
        if request_path is None:
            self.error = "Invalid Tag Request Path"
        # Service bytes are appended even on path failure; send() checks self.error
        # before transmitting, so nothing invalid goes on the wire.
        super().add(TagService.read_tag, request_path, Pack.uint(self.elements))

    def send(self):
        """Send the read request and return a ReadTagServiceResponsePacket (error response if build failed)."""
        if not self.error:
            self._send(self._build_request())
            self._ReadTagServiceRequestPacket__log.debug(f"Sent: {self!r}")
            reply = self._receive()
            response = ReadTagServiceResponsePacket(reply, elements=(self.elements), tag_info=(self.tag_info), tag=(self.tag))
        else:
            response = ReadTagServiceResponsePacket(tag=(self.tag))
            response._error = self.error
        self._ReadTagServiceRequestPacket__log.debug(f"Received: {response!r}")
        return response

    def __repr__(self):
        return f"{self.__class__.__name__}(tag={self.tag!r}, elements={self.elements!r})"
+
+
class ReadTagFragmentedServiceRequestPacket(SendUnitDataRequestPacket):
    # Connected "Read Tag Fragmented" service: reads a tag too large for one reply
    # by issuing repeated requests at increasing byte offsets.
    _ReadTagFragmentedServiceRequestPacket__log = logging.getLogger(f"{__module__}.{__qualname__}")
    type_ = "read"
    _response_class = ReadTagFragmentedServiceResponsePacket

    def __init__(self, plc):
        super().__init__(plc)
        self.tag = None           # tag name, set by add()
        self.elements = None      # number of elements to read
        self.tag_info = None      # cached tag metadata for reply parsing
        self.request_path = None  # encoded request path, built in add()

    def add(self, tag, elements=1, tag_info=None):
        """Stage the read; unlike the non-fragmented packet, the service bytes are
        appended per-fragment inside send(), so only the path is built here."""
        self.tag = tag
        self.elements = elements
        self.tag_info = tag_info
        self.request_path = _create_tag_rp(self.tag, self._plc.tags, self._plc.use_instance_ids)
        if self.request_path is None:
            self.error = "Invalid Tag Request Path"

    def send(self):
        """Loop requests at increasing offsets until the PLC stops replying with
        INSUFFICIENT_PACKETS, then reassemble the fragments into one response."""
        if not self.error:
            offset = 0
            responses = []
            while offset is not None:
                self._msg.extend([TagService.read_tag_fragmented,
                 self.request_path,
                 Pack.uint(self.elements),
                 Pack.dint(offset)])
                self._send(self._build_request())
                self._ReadTagFragmentedServiceRequestPacket__log.debug(f"Sent: {self!r} (offset={offset})")
                reply = self._receive()
                response = ReadTagFragmentedServiceResponsePacket(reply, self.tag_info, self.elements)
                self._ReadTagFragmentedServiceRequestPacket__log.debug(f"Received: {response!r}")
                responses.append(response)
                if response.service_status == INSUFFICIENT_PACKETS:
                    # More data pending: advance the offset and reset the message
                    # body with a fresh connection sequence number.
                    offset += len(response.bytes_)
                    self._msg = [Pack.uint(self._plc._sequence)]
                else:
                    offset = None

            if all(responses):
                # All fragments succeeded: splice the raw bytes together and
                # parse the combined value on the last response object.
                final_response = responses[-1]
                final_response.bytes_ = (b'').join((resp.bytes_ for resp in responses))
                final_response.parse_bytes()
                self._ReadTagFragmentedServiceRequestPacket__log.debug(f"Reassembled Response: {final_response!r}")
                return final_response
        failed_response = ReadTagServiceResponsePacket()
        failed_response._error = self.error or "One or more fragment responses failed"
        self._ReadTagFragmentedServiceRequestPacket__log.debug(f"Reassembled Response: {failed_response!r}")
        return failed_response

    def __repr__(self):
        return f"{self.__class__.__name__}(tag={self.tag!r}, elements={self.elements!r})"
+
+
class WriteTagServiceRequestPacket(SendUnitDataRequestPacket):
    # Connected "Write Tag" service (whole value fits in one request).
    _WriteTagServiceRequestPacket__log = logging.getLogger(f"{__module__}.{__qualname__}")
    type_ = "write"
    _response_class = WriteTagServiceResponsePacket

    def __init__(self, plc):
        super().__init__(plc)
        self.tag = None
        self.elements = None
        self.tag_info = None
        self.value = None
        self.data_type = None

    def add(self, tag, value, elements=1, tag_info=None, bits_write=None):
        """
        Stage a write of ``value`` to ``tag``.

        ``bits_write`` selects a read-modify-write of individual bits (value is
        an (or_mask, and_mask) pair) instead of a whole-value write.
        """
        self.tag = tag
        self.elements = elements
        self.tag_info = tag_info
        self.value = value
        request_path = _create_tag_rp(self.tag, self._plc.tags, self._plc.use_instance_ids)
        if request_path is None:
            self.error = "Invalid Tag Request Path"
        else:
            if bits_write:
                # request_path is re-bound to the complete service message here,
                # not just the path -- the helper prepends service/type/value bytes.
                request_path = _make_write_data_bit(tag_info, value, request_path)
                data_type = "BOOL"
            else:
                request_path, data_type = _make_write_data_tag(tag_info, value, elements, request_path)
            super().add(request_path)
            self.data_type = data_type

    def __repr__(self):
        return f"{self.__class__.__name__}(tag={self.tag!r}, value={_r(self.value)}, elements={self.elements!r})"
+
+
class WriteTagFragmentedServiceRequestPacket(SendUnitDataRequestPacket):
    # Connected "Write Tag Fragmented" service: writes a value too large for one
    # request by splitting it into offset-addressed segments.
    _WriteTagFragmentedServiceRequestPacket__log = logging.getLogger(f"{__module__}.{__qualname__}")
    type_ = "write"
    _response_class = WriteTagFragmentedServiceResponsePacket

    def __init__(self, plc):
        super().__init__(plc)
        self.tag = None           # tag name, set by add()
        self.value = None         # full value to write (bytes for structs, sequence for atomics)
        self.elements = None      # element count
        self.tag_info = None      # cached tag metadata
        self.request_path = None  # encoded request path
        self.data_type = None     # resolved data type name
        self.segment_size = None

    def add(self, tag, value, elements=1, tag_info=None):
        """
        Stage a fragmented write of ``value`` to ``tag``.

        Resolves the packed type bytes and request path; any failure is recorded
        in ``self.error`` instead of raising.
        """
        try:
            if tag_info["tag_type"] == "struct":
                self._packed_type = STRUCTURE_READ_REPLY + Pack.uint(tag_info["data_type"]["template"]["structure_handle"])
                self.data_type = tag_info["data_type"]["name"]
            else:
                # BUG FIX: the data type must be read from tag_info *before* the
                # DataType lookup; previously ``DataType[self.data_type]`` ran
                # while self.data_type was still None, raising KeyError for every
                # atomic-type fragmented write.
                self.data_type = tag_info["data_type"]
                self._packed_type = Pack.uint(DataType[self.data_type])
            self.tag = tag
            self.value = value
            self.elements = elements
            self.tag_info = tag_info
            self.request_path = _create_tag_rp(self.tag, self._plc.tags, self._plc.use_instance_ids)
            if self.request_path is None:
                self.error = "Invalid Tag Request Path"
        except Exception as err:
            self._WriteTagFragmentedServiceRequestPacket__log.exception("Failed adding request")
            self.error = err

    def send(self):
        """Send the value in segments sized to the negotiated connection, returning
        the last segment's response (or an error response if anything failed)."""
        if not self.error:
            responses = []
            # 9 bytes of per-request overhead: service + elements + offset fields.
            segment_size = self._plc.connection_size - (len(self.request_path) + len(self._packed_type) + 9)
            pack_func = Pack[self.data_type] if self.tag_info["tag_type"] == "atomic" else (lambda x: x)
            # BUG FIX: restored ordinary slicing; the decompiled
            # ``self.value[i[:i + segment_size]]`` subscripted an int and raised TypeError.
            segments = (self.value[i:i + segment_size] for i in range(0, len(self.value), segment_size))
            offset = 0
            elements_packed = Pack.uint(self.elements)
            for i, segment in enumerate(segments, start=1):
                segment_bytes = (b'').join((pack_func(s) for s in segment)) if not isinstance(segment, bytes) else segment
                self._msg.extend((
                 TagService.write_tag_fragmented,
                 self.request_path,
                 self._packed_type,
                 elements_packed,
                 Pack.dint(offset),
                 segment_bytes))
                self._send(self._build_request())
                self._WriteTagFragmentedServiceRequestPacket__log.debug(f"Sent: {self!r} (part={i} offset={offset})")
                reply = self._receive()
                response = WriteTagFragmentedServiceResponsePacket(reply)
                self._WriteTagFragmentedServiceRequestPacket__log.debug(f"Received: {response!r}")
                responses.append(response)
                offset += len(segment_bytes)
                # Reset the message body with a fresh sequence number for the next segment.
                self._msg = [Pack.uint(self._plc._sequence)]

            if all(responses):
                final_response = responses[-1]
                self._WriteTagFragmentedServiceRequestPacket__log.debug(f"Reassembled Response: {final_response!r}")
                return final_response
        failed_response = WriteTagFragmentedServiceResponsePacket()
        failed_response._error = self.error or "One or more fragment responses failed"
        self._WriteTagFragmentedServiceRequestPacket__log.debug(f"Reassembled Response: {failed_response!r}")
        return failed_response
+
+
class MultiServiceRequestPacket(SendUnitDataRequestPacket):
    # Multiple Service Packet: batches several read/write services into one
    # request routed through the Message Router object.
    _MultiServiceRequestPacket__log = logging.getLogger(f"{__module__}.{__qualname__}")
    type_ = "multi"
    _response_class = MultiServiceResponsePacket

    def __init__(self, plc):
        super().__init__(plc)
        self.tags = []  # list of per-tag dicts accepted so far (tag, rp, service, ...)
        # Fixed header: Multiple Service Packet service + 2-word path to the
        # Message Router object, instance 1.
        self._msg.extend((
         CommonService.multiple_service_request,
         Pack.usint(2),
         CLASS_TYPE["8-bit"],
         ClassCode.message_router,
         INSTANCE_TYPE["8-bit"],
         b'\x01'))
        self._message = None
        # NOTE(review): _msg_errors is never populated anywhere in this class as
        # visible here; send() always takes the success branch -- confirm against
        # the rest of the file.
        self._msg_errors = None

    @property
    def message(self) -> bytes:
        # Most recently built full message (set by add_read/add_write).
        return self._message

    def build_message(self, tags):
        """Serialize the service count, per-service offsets, and request paths for ``tags``."""
        rp_list, errors = [], []
        for tag in tags:
            if tag["rp"] is None:
                errors.append(f'Unable to create request path {tag["tag"]}')
            else:
                rp_list.append(tag["rp"])

        # Offsets are relative to the start of the embedded service list:
        # 2 bytes for the count plus 2 bytes per offset entry.
        offset = len(rp_list) * 2 + 2
        offsets = []
        for rp in rp_list:
            offsets.append(Pack.uint(offset))
            offset += len(rp)

        msg = self._msg + [Pack.uint(len(rp_list))] + offsets + rp_list
        return (b'').join(msg)

    def add_read(self, tag, elements=1, tag_info=None):
        """
        Try to append a read service for ``tag``.

        :return: True if it fits within the connection size, False if the caller
                 should start a new packet
        :raises RequestError: if the request path cannot be built
        """
        request_path = _create_tag_rp(tag, self._plc.tags, self._plc.use_instance_ids)
        if request_path is not None:
            request_path = TagService.read_tag + request_path + Pack.uint(elements)
            # NOTE(review): the 'service' value is the literal string '"read"'
            # (with embedded quotes) -- looks like a decompilation artifact of
            # 'read'; verify against whatever consumes this field.
            _tag = {'tag': tag, 'elements': elements, 'tag_info': tag_info, 'rp': request_path, 
             'service': '"read"'}
            message = self.build_message(self.tags + [_tag])
            if len(message) < self._plc.connection_size:
                self._message = message
                self.tags.append(_tag)
                return True
            return False
        else:
            self._MultiServiceRequestPacket__log.error(f"Failed to create request path for {tag}")
            raise RequestError("Failed to create request path")

    def add_write(self, tag, value, elements=1, tag_info=None, bits_write=None):
        """
        Try to append a write service for ``tag`` (bit-level when ``bits_write``).

        :return: True if it fits within the connection size, False otherwise
        :raises RequestError: if the request path cannot be built
        """
        request_path = _create_tag_rp(tag, self._plc.tags, self._plc.use_instance_ids)
        if request_path is not None:
            if bits_write:
                data_type = tag_info["data_type"]
                request_path = _make_write_data_bit(tag_info, value, request_path)
            else:
                request_path, data_type = _make_write_data_tag(tag_info, value, elements, request_path)
            # NOTE(review): same quoted-literal artifact as add_read ('"write"').
            _tag = {'tag': tag, 'elements': elements, 'tag_info': tag_info, 'rp': request_path, 
             'service': '"write"', 
             'value': value, 'data_type': data_type}
            message = self.build_message(self.tags + [_tag])
            if len(message) < self._plc.connection_size:
                self._message = message
                self.tags.append(_tag)
                return True
            return False
        else:
            self._MultiServiceRequestPacket__log.error(f"Failed to create request path for {tag}")
            raise RequestError("Failed to create request path")

    def send(self):
        """Send the batched request and return a MultiServiceResponsePacket."""
        if not self._msg_errors:
            request = self._build_request()
            self._send(request)
            self._MultiServiceRequestPacket__log.debug(f"Sent: {self!r}")
            reply = self._receive()
            response = MultiServiceResponsePacket(reply, tags=(self.tags))
        else:
            self.error = f'Failed to create request path for: {", ".join(self._msg_errors)}'
            response = MultiServiceResponsePacket()
            response._error = self.error
        self._MultiServiceRequestPacket__log.debug(f"Received: {response!r}")
        return response
+
+
def _make_write_data_tag(tag_info, value, elements, request_path, fragmented=False):
    """
    Assemble the service + path + packed type + element count + value bytes for a tag write.

    :param value: raw bytes for struct tags, already-packed bytes for atomics
    :return: (message bytes, resolved data type name)
    :raises RequestError: for UDT values that are not bytes, or unknown data types
    """
    data_type = tag_info["data_type"]
    if tag_info["tag_type"] == "struct":
        if not isinstance(value, bytes):
            raise RequestError("Writing UDTs only supports bytes for value")
        # 0xA0 0x02 + handle = abbreviated structure type descriptor
        _dt_value = b'\xa0\x02' + Pack.uint(tag_info["data_type"]["template"]["structure_handle"])
        data_type = tag_info["data_type"]["name"]
    elif data_type not in DataType:
        raise RequestError("Unsupported data type")
    else:
        _dt_value = Pack.uint(DataType[data_type])
    service = TagService.write_tag_fragmented if fragmented else TagService.write_tag
    message = b''.join((service, request_path, _dt_value, Pack.uint(elements), value))
    return message, data_type
+
+
def _make_write_data_bit(tag_info, value, request_path):
    """
    Build a Read-Modify-Write request for setting/clearing individual bits.

    :param value: (or_mask, and_mask) pair applied to the tag's current value
    :raises RequestError: if the tag's data type has no known size
    """
    mask_size = DataTypeSize.get(tag_info["data_type"])
    if mask_size is None:
        raise RequestError(f'Invalid data type {tag_info["data_type"]} for writing bits')
    or_mask, and_mask = value
    # BUG FIX: masks are truncated with an ordinary slice [:mask_size]; the
    # decompiled ``Pack.udint(...)[None[:mask_size]]`` subscripted None and
    # raised TypeError on every bit write.
    return b''.join((
        TagService.read_modify_write,
        request_path,
        Pack.uint(mask_size),
        Pack.udint(or_mask)[:mask_size],
        Pack.udint(and_mask)[:mask_size],
    ))
+
+
class SendRRDataRequestPacket(RequestPacket):
    # Unconnected (UCMM) request using the Send RR Data encapsulation command.
    _SendRRDataRequestPacket__log = logging.getLogger(f"{__module__}.{__qualname__}")
    _message_type = DataItem.unconnected
    _address_type = AddressItem.uccm
    _encap_command = EncapsulationCommand.send_rr_data
    _response_class = SendRRDataResponsePacket

    def _build_common_packet_format(self, addr_data=None):
        # NOTE(review): addr_data is hard-coded to None rather than forwarded --
        # the UCMM address item carries no data, so this is likely intentional,
        # but confirm no caller passes a meaningful addr_data.
        return super()._build_common_packet_format(addr_data=None)
+
+
class RegisterSessionRequestPacket(RequestPacket):
    # Register Session: establishes an EtherNet/IP session with the target.
    _RegisterSessionRequestPacket__log = logging.getLogger(f"{__module__}.{__qualname__}")
    _encap_command = EncapsulationCommand.register_session
    _response_class = RegisterSessionResponsePacket

    def _build_common_packet_format(self, addr_data=None) -> bytes:
        # Register Session carries its payload directly with no CPF wrapping.
        return self.message
+
+
class UnRegisterSessionRequestPacket(RequestPacket):
    # Unregister Session: fire-and-forget; the target closes the connection
    # without replying, hence the stubbed-out _receive.
    _UnRegisterSessionRequestPacket__log = logging.getLogger(f"{__module__}.{__qualname__}")
    _encap_command = EncapsulationCommand.unregister_session
    _response_class = UnRegisterSessionResponsePacket

    def _build_common_packet_format(self, addr_data=None) -> bytes:
        # No payload beyond the encapsulation header.
        return b''

    def _receive(self):
        # No reply is ever sent for unregister; skip the socket read.
        return b''
+
+
class ListIdentityRequestPacket(RequestPacket):
    # List Identity: discovery request with an empty payload.
    _ListIdentityRequestPacket__log = logging.getLogger(f"{__module__}.{__qualname__}")
    _encap_command = EncapsulationCommand.list_identity
    _response_class = ListIdentityResponsePacket

    def _build_common_packet_format(self, addr_data=None) -> bytes:
        # No payload beyond the encapsulation header.
        return b''
+
+
def _create_tag_rp(tag, tag_cache, use_instance_ids):
    """

    It returns the request packed wrapped around the tag passed.
    If any error it returns none
    """
    # Dotted path: first component is the base tag, the rest are struct members.
    tags = tag.split(".")
    if tags:
        base, *attrs = tags
        base_tag, index = _find_tag_index(base)
        if use_instance_ids and base_tag in tag_cache:
            # Known tag: address it directly by Symbol Object instance id (shorter path).
            rp = [
             CLASS_TYPE["8-bit"],
             ClassCode.symbol_object,
             INSTANCE_TYPE["16-bit"],
             Pack.uint(tag_cache[base_tag]["instance_id"])]
        else:
            # Fall back to an extended symbolic segment (ANSI name), padded to an even length.
            base_len = len(base_tag)
            rp = [EXTENDED_SYMBOL,
             Pack.usint(base_len),
             base_tag.encode()]
            if base_len % 2:
                rp.append(b'\x00')
        # NOTE(review): _find_tag_index as written never returns a None index
        # (it returns [] for non-array tags), so this guard looks unreachable.
        if index is None:
            return
        rp += _encode_tag_index(index)
        # Append one symbolic segment (plus any array indices) per struct member.
        for attr in attrs:
            attr, index = _find_tag_index(attr)
            tag_length = len(attr)
            attr_path = [
             EXTENDED_SYMBOL,
             Pack.usint(tag_length),
             attr.encode()]
            if tag_length % 2:
                attr_path.append(b'\x00')
            if index is None:
                return
            attr_path += _encode_tag_index(index)
            rp += attr_path

        return Pack.epath((b'').join(rp))
+
+
+def _find_tag_index(tag):
+    if "[" in tag:
+        t = tag[None[:len(tag) - 1]]
+        inside_value = t[(t.find("[") + 1)[:None]]
+        index = inside_value.split(",")
+        tag = t[None[:t.find("[")]]
+    else:
+        index = []
+    return (
+     tag, index)
+
+
+def _encode_tag_index(index):
+    return [_encode_segment(int(idx), ELEMENT_TYPE) for idx in index]
+
+
class GenericConnectedRequestPacket(SendUnitDataRequestPacket):
    # User-built connected CIP message: arbitrary service/class/instance/attribute.
    _GenericConnectedRequestPacket__log = logging.getLogger(f"{__module__}.{__qualname__}")
    _response_class = GenericConnectedResponsePacket

    def __init__(self, plc):
        super().__init__(plc)
        self.service = None
        self.class_code = None
        self.instance = None
        self.attribute = None
        self.request_data = None

    def build(self, service, class_code, instance, attribute=b'', request_data=b'', data_format=None):
        """
        Assemble the message from its CIP components.

        ``data_format`` is forwarded to the response so its payload can be
        unpacked into named fields.
        """
        self._response_kwargs = {"data_format": data_format}
        self.class_code = class_code
        self.instance = instance
        self.attribute = attribute
        self.service = service
        self.request_data = request_data
        req_path = request_path(class_code, instance, attribute)
        self.add(service, req_path, request_data)
+
+
class GenericUnconnectedRequestPacket(SendRRDataRequestPacket):
    # User-built unconnected CIP message, optionally wrapped in an Unconnected Send.
    _GenericUnconnectedRequestPacket__log = logging.getLogger(f"{__module__}.{__qualname__}")
    _response_class = GenericUnconnectedResponsePacket

    def __init__(self, plc):
        super().__init__(plc)
        self.service = None
        self.class_code = None
        self.instance = None
        self.attribute = None
        self.request_data = None

    def build(self, service: Union[(int, bytes)], class_code: Union[(int, bytes)], instance: Union[(int, bytes)], attribute: Union[(int, bytes)]=b'', request_data: bytes=b'', route_path: bytes=b'', unconnected_send: bool=False, data_format: DataFormatType=None):
        """
        Assemble the message from its CIP components.

        When ``unconnected_send`` is set, the whole message is wrapped in a
        Connection Manager Unconnected Send so it can be routed via ``route_path``;
        otherwise the route path is appended directly.
        """
        self._response_kwargs = {"data_format": data_format}
        self.class_code = class_code
        self.instance = instance
        self.attribute = attribute
        self.service = service
        self.request_data = request_data
        req_path = request_path(class_code, instance, attribute)
        if unconnected_send:
            self.add(wrap_unconnected_send((b'').join((service, req_path, request_data)), route_path))
        else:
            self.add(service, req_path, request_data, route_path)
+
+
def wrap_unconnected_send(message, route_path):
    """Wrap ``message`` in a Connection Manager Unconnected Send service, appending ``route_path``."""
    cm_path = request_path(class_code=(ClassCode.connection_manager), instance=b'\x01')
    msg_len = len(message)
    pad = b'\x00' if msg_len % 2 else b''  # pad odd-length messages to a word boundary
    parts = [
        ConnectionManagerService.unconnected_send,
        cm_path,
        PRIORITY,
        TIMEOUT_TICKS,
        Pack.uint(msg_len),
        message,
        pad,
        route_path,
    ]
    return b''.join(parts)
+
+
def request_path(class_code: Union[(int, bytes)], instance: Union[(int, bytes)], attribute: Union[(int, bytes)]=b'', data: bytes=b''):
    """Build a packed EPATH of class / instance [/ attribute] segments plus optional trailing data."""
    segments = [
        _encode_segment(class_code, CLASS_TYPE),
        _encode_segment(instance, INSTANCE_TYPE),
    ]
    if attribute:
        segments.append(_encode_segment(attribute, ATTRIBUTE_TYPE))
    if data:
        segments.append(data)
    return Pack.epath(b''.join(segments))
+
+
+def _encode_segment(segment: Union[(bytes, int)], segment_types: dict):
+    if isinstance(segment, int):
+        if segment <= 255:
+            segment = Pack.usint(segment)
+        else:
+            if segment <= 65535:
+                segment = Pack.uint(segment)
+            else:
+                if segment <= 68719476735L:
+                    segment = Pack.dint(segment)
+                else:
+                    raise RequestError("Invalid segment value")
+    _type = segment_types.get(len(segment))
+    if _type is None:
+        raise RequestError("Segment value not valid for segment type")
+    return _type + segment
diff --git a/APPS_UNCOMPILED/lib/pycomm3/packets/responses.py b/APPS_UNCOMPILED/lib/pycomm3/packets/responses.py
new file mode 100644
index 0000000..2108c03
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/pycomm3/packets/responses.py
@@ -0,0 +1,474 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pycomm3/packets/responses.py
+# Compiled at: 2024-04-18 03:12:57
+# Size of source mod 2**32: 18993 bytes
+import logging
+from itertools import tee, zip_longest, chain
+from reprlib import repr as _r
+from . import Packet, DataFormatType
+from ..bytes_ import Unpack
+from ..const import SUCCESS, INSUFFICIENT_PACKETS, TagService, SERVICE_STATUS, EXTEND_CODES, MULTI_PACKET_SERVICES, DataType, STRUCTURE_READ_REPLY, DataTypeSize
+
class ResponsePacket(Packet):
    """
    Base class for replies to EtherNet/IP requests.

    Parses the encapsulation header out of ``raw_data`` (when given) and exposes
    the command/service statuses plus a human-readable ``error``.
    """
    _ResponsePacket__log = logging.getLogger(f"{__module__}.{__qualname__}")

    def __init__(self, raw_data=None, *args, **kwargs):
        super().__init__()
        self.raw = raw_data      # full reply bytes, or None when nothing was received
        self._error = None       # explicit error text; takes precedence over status decoding
        self.service = None
        self.service_status = None
        self.data = None         # payload after the headers (populated by subclasses)
        self.command = None      # encapsulation command echoed by the target
        self.command_status = None
        self._is_valid = False
        if raw_data is not None:
            self._parse_reply()

    def __bool__(self):
        return self.is_valid()

    @property
    def error(self):
        """Best-effort description of why the reply failed, or None when it is valid."""
        if self.is_valid():
            return None
        if self._error is not None:
            return self._error
        if self.command_status not in (None, SUCCESS):
            return self.command_extended_status()
        if self.service_status not in (None, SUCCESS):
            return self.service_extended_status()
        return "Unknown Error"

    def is_valid(self):
        """A reply is valid when it parsed cleanly and the command status is SUCCESS."""
        return all((
            self._error is None,
            self.command is not None,
            self.command_status == SUCCESS))

    def _parse_reply(self):
        """Parse the encapsulation header; failures are recorded in ``_error`` rather than raised."""
        try:
            if self.raw is None:
                self._error = "No Reply From PLC"
            else:
                # BUG FIX: restored ordinary slice syntax; the decompiled
                # ``raw[None[:2]]`` / ``raw[8[:12]]`` forms subscripted None/int
                # and raised TypeError on every reply.
                self.command = self.raw[:2]
                self.command_status = Unpack.dint(self.raw[8:12])
        except Exception as err:
            self._error = f"Failed to parse reply - {err}"

    def command_extended_status(self):
        """Subclasses decode command-level extended status; the base has no details."""
        return "Unknown Error"

    def service_extended_status(self):
        """Subclasses decode service-level extended status; the base has no details."""
        return "Unknown Error"

    def __repr__(self):
        return f"{self.__class__.__name__}(service={self.service if self.service else None!r}, command={self.command!r}, error={self.error!r})"

    __str__ = __repr__
+
+
class SendUnitDataResponsePacket(ResponsePacket):
    """Reply to a connected (Send Unit Data) request; service payload starts at byte 50."""
    _SendUnitDataResponsePacket__log = logging.getLogger(f"{__module__}.{__qualname__}")

    def __init__(self, raw_data=None, *args, **kwargs):
        (super().__init__)(raw_data, *args, **kwargs)

    def _parse_reply(self):
        """Extract service byte, service status, and payload from the fixed offsets."""
        try:
            super()._parse_reply()
            # BUG FIX: restored ordinary slices ([46:47], [48:49], [50:]); the
            # decompiled ``raw[46[:47]]`` forms subscripted ints and raised
            # TypeError on every reply.
            self.service = TagService.get(TagService.from_reply(self.raw[46:47]))
            self.service_status = Unpack.usint(self.raw[48:49])
            self.data = self.raw[50:]
        except Exception as err:
            self._error = f"Failed to parse reply - {err}"

    def is_valid(self):
        # INSUFFICIENT_PACKETS is acceptable for services whose reply spans
        # multiple packets (fragmented reads, etc.).
        valid = self.service_status == SUCCESS or self.service_status == INSUFFICIENT_PACKETS and self.service in MULTI_PACKET_SERVICES
        return all((
            super().is_valid(),
            valid))

    def command_extended_status(self):
        return f"{get_service_status(self.command_status)} - {get_extended_status(self.raw, 48)}"

    def service_extended_status(self):
        return f"{get_service_status(self.service_status)} - {get_extended_status(self.raw, 48)}"
+
+
class SendRRDataResponsePacket(ResponsePacket):
    """Reply to an unconnected (Send RR Data) request; service payload starts at byte 44."""
    _SendRRDataResponsePacket__log = logging.getLogger(f"{__module__}.{__qualname__}")

    def __init__(self, raw_data=None, *args, **kwargs):
        super().__init__(raw_data)

    def _parse_reply(self):
        """Extract service byte, service status, and payload from the fixed offsets."""
        try:
            super()._parse_reply()
            # BUG FIX: restored ordinary slices ([40:41], [42:43], [44:]); the
            # decompiled ``raw[40[:41]]`` forms subscripted ints and raised
            # TypeError on every reply.
            self.service = TagService.get(TagService.from_reply(self.raw[40:41]))
            self.service_status = Unpack.usint(self.raw[42:43])
            self.data = self.raw[44:]
        except Exception as err:
            self._error = f"Failed to parse reply - {err}"

    def is_valid(self):
        return all((
            super().is_valid(),
            self.service_status == SUCCESS))

    def command_extended_status(self):
        return f"{get_service_status(self.command_status)} - {get_extended_status(self.raw, 42)}"

    def service_extended_status(self):
        return f"{get_service_status(self.service_status)} - {get_extended_status(self.raw, 42)}"
+
+
class GenericConnectedResponsePacket(SendUnitDataResponsePacket):
    # Response to a user-built connected message; optionally unpacks the payload
    # into named fields per the request's data_format.
    _GenericConnectedResponsePacket__log = logging.getLogger(f"{__module__}.{__qualname__}")

    def __init__(self, *args, data_format, **kwargs):
        # data_format: sequence of (name, type) pairs for _parse_data, or None
        # to expose the raw payload.
        self.data_format = data_format
        self.value = None
        (super().__init__)(*args, **kwargs)

    def _parse_reply(self):
        super()._parse_reply()
        if self.data_format is None:
            self.value = self.data
        else:
            if self.is_valid():
                try:
                    self.value = _parse_data(self.data, self.data_format)
                except Exception as err:
                    try:
                        self._error = f"Failed to parse reply - {err}"
                        self.value = None
                    finally:
                        err = None
                        del err
+
+
class GenericUnconnectedResponsePacket(SendRRDataResponsePacket):
    # Response to a user-built unconnected message; mirrors
    # GenericConnectedResponsePacket but over Send RR Data.
    _GenericUnconnectedResponsePacket__log = logging.getLogger(f"{__module__}.{__qualname__}")

    def __init__(self, *args, data_format, **kwargs):
        # data_format: sequence of (name, type) pairs for _parse_data, or None
        # to expose the raw payload.
        self.data_format = data_format
        self.value = None
        (super().__init__)(*args, **kwargs)

    def _parse_reply(self):
        super()._parse_reply()
        if self.data_format is None:
            self.value = self.data
        else:
            if self.is_valid():
                try:
                    self.value = _parse_data(self.data, self.data_format)
                except Exception as err:
                    try:
                        self._error = f"Failed to parse reply - {err}"
                        self.value = None
                    finally:
                        err = None
                        del err
+
+
+def _parse_data(data, fmt):
+    values = {}
+    start = 0
+    for name, typ in fmt:
+        if isinstance(typ, int):
+            value = data[start[:start + typ]]
+            start += typ
+        else:
+            unpack_func = Unpack[typ]
+            value = unpack_func(data[start[:None]])
+            data_size = len(value) + 1 if typ == "SHORT_STRING" else DataTypeSize[typ]
+            start += data_size
+        if name:
+            values[name] = value
+
+    return values
+
+
class ReadTagServiceResponsePacket(SendUnitDataResponsePacket):
    # Reply to a single-tag read; decodes the typed value from the payload.
    _ReadTagServiceResponsePacket__log = logging.getLogger(f"{__module__}.{__qualname__}")

    def __init__(self, raw_data=None, tag_info=None, elements=1, tag=None, *args, **kwargs):
        self.value = None      # decoded value (set during parsing when valid)
        self.elements = elements
        self.data_type = None  # decoded data type name
        self.tag_info = tag_info
        self.tag = tag
        (super().__init__)(raw_data, *args, **kwargs)

    def _parse_reply(self):
        """Parse headers, then decode the tag value; failures set _error instead of raising."""
        try:
            super()._parse_reply()
            if self.is_valid():
                self.value, self.data_type = parse_read_reply(self.data, self.tag_info, self.elements)
            else:
                self.value, self.data_type = (None, None)
        except Exception as err:
            try:
                self._ReadTagServiceResponsePacket__log.exception("Failed parsing reply data")
                self.value = None
                self._error = f"Failed to parse reply - {err}"
            finally:
                err = None
                del err

    def __repr__(self):
        return f"{self.__class__.__name__}({self.data_type!r}, {_r(self.value)}, {self.service_status!r})"
+
+
class ReadTagFragmentedServiceResponsePacket(SendUnitDataResponsePacket):
    """
    Reply to one fragment of a fragmented read.

    ``_parse_reply`` only separates the type descriptor from the data bytes;
    the request object concatenates ``bytes_`` across fragments and calls
    ``parse_bytes`` on the final response to decode the whole value.
    """
    _ReadTagFragmentedServiceResponsePacket__log = logging.getLogger(f"{__module__}.{__qualname__}")

    def __init__(self, raw_data=None, tag_info=None, elements=1, *args, **kwargs):
        self.value = None      # decoded value (set by parse_bytes)
        self.elements = elements
        self.data_type = None  # decoded data type name (set by parse_bytes)
        self.tag_info = tag_info
        self.bytes_ = None     # this fragment's raw value bytes
        (super().__init__)(raw_data, *args, **kwargs)

    def _parse_reply(self):
        super()._parse_reply()
        # BUG FIX: restored ordinary slices; the decompiled ``data[None[:2]]`` /
        # ``data[4[:None]]`` forms subscripted None/int and raised TypeError.
        if self.data[:2] == STRUCTURE_READ_REPLY:
            # Struct replies carry a 4-byte type descriptor (marker + handle).
            self.bytes_ = self.data[4:]
            self._data_type = self.data[:4]
        else:
            # Atomic replies carry a 2-byte type code.
            self.bytes_ = self.data[2:]
            self._data_type = self.data[:2]

    def parse_bytes(self):
        """Decode the (reassembled) value from ``_data_type`` + ``bytes_``."""
        try:
            if self.is_valid():
                self.value, self.data_type = parse_read_reply(self._data_type + self.bytes_, self.tag_info, self.elements)
            else:
                self.value, self.data_type = (None, None)
        except Exception as err:
            self._ReadTagFragmentedServiceResponsePacket__log.exception("Failed parsing reply data")
            self.value = None
            self._error = f"Failed to parse reply - {err}"

    def __repr__(self):
        return f"{self.__class__.__name__}(raw_data={_r(self.raw)})"

    __str__ = __repr__
+
+
class WriteTagServiceResponsePacket(SendUnitDataResponsePacket):
    # Reply to a single-tag write; no payload beyond the inherited status handling.
    _WriteTagServiceResponsePacket__log = logging.getLogger(f"{__module__}.{__qualname__}")
+
+
class WriteTagFragmentedServiceResponsePacket(SendUnitDataResponsePacket):
    # Reply to one fragment of a fragmented write; no payload beyond the inherited status handling.
    _WriteTagFragmentedServiceResponsePacket__log = logging.getLogger(f"{__module__}.{__qualname__}")
+
+
class MultiServiceResponsePacket(SendUnitDataResponsePacket):
    """
    Reply to a Multiple Service Packet: splits the payload into per-service
    replies (via the embedded offset table) and decodes each one against the
    request's ``tags`` list, annotating each tag dict in place.
    """
    _MultiServiceResponsePacket__log = logging.getLogger(f"{__module__}.{__qualname__}")

    def __init__(self, raw_data=None, tags=None, *args, **kwargs):
        self.tags = tags               # per-tag request dicts, updated with results
        self.values = None             # decoded read values, in request order
        self.request_statuses = None
        (super().__init__)(raw_data, *args, **kwargs)

    def _parse_reply(self):
        super()._parse_reply()
        # BUG FIX throughout: restored ordinary slicing; the decompiled
        # ``data[2[:...]]`` / ``data[i[:j]]`` forms subscripted ints and raised
        # TypeError on every multi-service reply.
        num_replies = Unpack.uint(self.data)
        offset_data = self.data[2:2 + 2 * num_replies]
        offsets = (Unpack.uint(offset_data[i:i + 2]) for i in range(0, len(offset_data), 2))
        # Pair each offset with the next one (last pairs with None -> end of data).
        start, end = tee(offsets)
        next(end)
        reply_data = [self.data[i:j] for i, j in zip_longest(start, end)]
        values = []
        for data, tag in zip(reply_data, self.tags):
            service = data[0:1]
            service_status = data[2]
            tag["service_status"] = service_status
            if service_status != SUCCESS:
                tag["error"] = f"{get_service_status(service_status)} - {get_extended_status(data, 2)}"
            if TagService.get(TagService.from_reply(service)) == TagService.read_tag:
                if service_status == SUCCESS:
                    value, dt = parse_read_reply(data[4:], tag["tag_info"], tag["elements"])
                else:
                    value, dt = (None, None)
                values.append(value)
                tag["value"] = value
                tag["data_type"] = dt
            else:
                # Writes (and other services) carry no decodable value.
                tag["value"] = None
                tag["data_type"] = None

        self.values = values

    def __repr__(self):
        return f"{self.__class__.__name__}(values={_r(self.values)}, error={self.error!r})"
+
+
class RegisterSessionResponsePacket(ResponsePacket):
    """EIP RegisterSession reply; extracts the session handle.

    Fixed: ``self.raw[4[:8]]`` decompiler artifact restored to ``self.raw[4:8]``
    and the ``err = None; del err`` cleanup artifact removed.
    """

    __log = logging.getLogger(f"{__module__}.{__qualname__}")

    def __init__(self, raw_data=None, *args, **kwargs):
        # set before base __init__, which triggers parsing of raw_data
        self.session = None
        super().__init__(raw_data)

    def _parse_reply(self):
        try:
            super()._parse_reply()
            # session handle is the UDINT at bytes 4-7 of the reply
            self.session = Unpack.udint(self.raw[4:8])
        except Exception as err:
            self._error = f"Failed to parse reply - {err}"

    def is_valid(self):
        return all((
            super().is_valid(),
            self.session is not None))

    def __repr__(self):
        return f"{self.__class__.__name__}(session={self.session!r}, error={self.error!r})"
+
+
class UnRegisterSessionResponsePacket(ResponsePacket):
    """UnRegisterSession carries no reply payload, so it is always valid."""

    __log = logging.getLogger(f"{__module__}.{__qualname__}")

    def _parse_reply(self):
        # nothing to parse -- the service has no response data
        pass

    def is_valid(self):
        return True

    def __repr__(self):
        return "UnRegisterSessionResponsePacket()"
+
+
class ListIdentityResponsePacket(ResponsePacket):
    """Response to a ListIdentity request; decodes the identity object.

    Fixed: ``self.raw[28[:None]]`` decompiler artifact restored to
    ``self.raw[28:]`` and the exception cleanup artifacts removed.
    """

    __log = logging.getLogger(f"{__module__}.{__qualname__}")

    # (field name, CIP type or raw byte count) pairs consumed by _parse_data
    _data_format = (('item_type_code', 'UINT'), ('item_length', 'UINT'), ('encap_protocol_version', 'UINT'),
                    ('_socket_address_struct', 16), ('vendor_id', 'UINT'), ('product_code', 'UINT'),
                    ('revision_major', 'USINT'), ('revision_minor', 'USINT'), ('status', 'WORD'),
                    ('serial_number', 'UDINT'), ('product_name', 'SHORT_STRING'),
                    ('state', 'USINT'))

    def __init__(self, raw_data=None, *args, **kwargs):
        self.identity = {}
        super().__init__(raw_data)

    def _parse_reply(self):
        try:
            super()._parse_reply()
            # identity payload starts at byte 28 of the raw reply
            self.data = self.raw[28:]
            self.identity = _parse_data(self.data, self._data_format)
        except Exception as err:
            self.__log.exception("Failed to parse response")
            self._error = f"Failed to parse reply - {err}"

    def is_valid(self):
        return all((
            super().is_valid(),
            self.identity is not None))

    def __repr__(self):
        return f"{self.__class__.__name__}(identity={self.identity!r}, error={self.error!r})"
+
+
def parse_read_reply(data, data_type, elements):
    """Decode a read-reply payload into ``(value, data_type_name)``.

    Fixed: broken decompiled slices (``data[None[:2]]`` etc.) restored, and the
    trailing name-formatting / ``return`` de-indented to function level -- in
    the decompiled version it was nested inside the atomic branch, so structure
    reads (and atomic DWORD reads) fell through and returned None.
    """
    if data[:2] == STRUCTURE_READ_REPLY:
        # structured data: skip the 4-byte type prefix, then split by template size
        data = data[4:]
        size = data_type["data_type"]["template"]["structure_size"]
        dt_name = data_type["data_type"]["name"]
        if elements > 1:
            value = [parse_read_reply_struct(data[i:i + size], data_type["data_type"])
                     for i in range(0, len(data), size)]
        else:
            value = parse_read_reply_struct(data, data_type["data_type"])
    else:
        datatype = DataType[Unpack.uint(data[:2])]
        dt_name = datatype
        if elements > 1:
            func = Unpack[datatype]
            size = DataTypeSize[datatype]
            data = data[2:]
            value = [func(data[i:i + size]) for i in range(0, len(data), size)]
            if datatype == "DWORD":
                # DWORDs are exposed as arrays of 32 bools each
                value = list(chain.from_iterable(dword_to_bool_array(val) for val in value))
        else:
            value = Unpack[datatype](data[2:])
            if datatype == "DWORD":
                value = dword_to_bool_array(value)
    if dt_name == "DWORD":
        dt_name = f"BOOL[{elements * 32}]"
    elif elements > 1:
        dt_name = f"{dt_name}[{elements}]"
    return value, dt_name
+
+
+# NOTE(review): uncompyle6 failed to decompile `parse_read_reply_struct` here --
+# "Parse error at or near `LOAD_DICTCOMP' instruction at offset 512".
+# The definition must be restored from the original pycomm3 sources; until then
+# callers (parse_read_reply) will raise NameError for structure reads.
+
+
def parse_string(data):
    """Decode a CIP string: a DINT length followed by that many character bytes.

    Fixed: ``data[4[:4 + str_len]]`` decompiler artifact restored to a real slice.
    """
    str_len = Unpack.dint(data)
    str_data = data[4:4 + str_len]
    # map signed byte values back into 0-255 before chr()
    return "".join(chr(v + 256) if v < 0 else chr(v) for v in str_data)
+
+
def dword_to_bool_array(dword):
    """Convert a 32-bit DWORD into a list of 32 bools, index 0 = least-significant bit.

    Fixed: ``bin(dword)[2[:None]]`` decompiler artifact restored to
    ``bin(dword)[2:]`` (strip the '0b' prefix).
    """
    bits = [x == "1" for x in bin(dword)[2:]]
    bools = [False for _ in range(32 - len(bits))] + bits  # left-pad to 32 bits
    bools.reverse()  # LSB first
    return bools
+
+
def get_service_status(status):
    """Map a PCCC service status code to its text, with a hex-coded fallback."""
    fallback = f"Unknown Error ({status:0>2x})"
    return SERVICE_STATUS.get(status, fallback)
+
+
def get_extended_status(msg, start):
    """Decode the (extended) status of a CIP error reply starting at ``start``.

    Fixed: broken decompiled slices (``msg[start[:start + 1]]`` etc.) restored,
    and the nested if/else ladder flattened to elif.
    """
    status = Unpack.usint(msg[start:start + 1])
    # size field is in words; convert to bytes
    extended_status_size = Unpack.usint(msg[start + 1:start + 2]) * 2
    extended_status = 0
    if extended_status_size != 0:
        # NOTE(review): size is always even after *2, so the ``== 1`` arm looks
        # unreachable; kept to preserve the original behavior -- verify upstream.
        if extended_status_size == 1:
            extended_status = Unpack.usint(msg[start + 2:start + 3])
        elif extended_status_size == 2:
            extended_status = Unpack.uint(msg[start + 2:start + 4])
        elif extended_status_size == 4:
            extended_status = Unpack.dint(msg[start + 2:start + 6])
        else:
            return "Extended Status Size Unknown"
    try:
        return f"{EXTEND_CODES[status][extended_status]}  ({status:0>2x}, {extended_status:0>2x})"
    except LookupError:
        return "No Extended Status"
\ No newline at end of file
diff --git a/APPS_UNCOMPILED/lib/pycomm3/slc.py b/APPS_UNCOMPILED/lib/pycomm3/slc.py
new file mode 100644
index 0000000..b433e3e
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/pycomm3/slc.py
@@ -0,0 +1,321 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pycomm3/slc.py
+# Compiled at: 2024-04-18 03:12:57
+# Size of source mod 2**32: 17226 bytes
+__all__ = [
+ "SLCDriver"]
+import logging, math, re
+from typing import List, Tuple, Optional, Union
+from .bytes_ import Pack, Unpack
+from .cip_base import CIPDriver, with_forward_open
+from .const import CLASS_TYPE, SUCCESS, PCCC_CT, PCCC_DATA_TYPE, PCCC_DATA_SIZE, PCCC_ERROR_CODE, SLC_CMD_CODE, SLC_FNC_READ, SLC_FNC_WRITE, SLC_REPLY_START, PCCC_PATH
+from .exceptions import DataError, RequestError
+from .tag import Tag
# Type aliases shared by the read/write API below.
AtomicValueType = Union[int, float, bool]
TagValueType = Union[AtomicValueType, List[AtomicValueType]]
ReadWriteReturnType = Union[Tag, List[Tag]]
+
class SLCDriver(CIPDriver):
    """
    An Ethernet/IP Client driver for reading and writing of data files in SLC or MicroLogix PLCs.
    """

    __log = logging.getLogger(f"{__module__}.{__qualname__}")

    def __init__(self, path, *args, **kwargs):
        # SLC/MicroLogix do not support large forward-open packets
        super().__init__(path, *args, large_packets=False, **kwargs)

    @with_forward_open
    def read(self, *addresses: str) -> ReadWriteReturnType:
        """
        Reads data file addresses. To read multiple words add the word count to the address using curly braces,
        e.g. ``N120:10{10}``.

        Does not track request/response size like the CLXDriver.

        :param addresses: one or many data file addresses to read
        :return: a single or list of ``Tag`` objects
        """
        results = [self._read_tag(tag) for tag in addresses]
        if len(results) == 1:
            return results[0]
        return results

    def _read_tag(self, tag) -> Tag:
        """Build, send, and parse a single PCCC protected-typed-read request."""
        _tag = parse_tag(tag)
        if _tag is None:
            raise RequestError(f"Error parsing the tag passed to read() - {tag}")
        message_request = [
            b'K',
            b'\x02',
            CLASS_TYPE["8-bit"],
            PCCC_PATH,
            b'\x07',
            self._cfg["vid"],
            self._cfg["vsn"],
            SLC_CMD_CODE,
            b'\x00',
            Pack.uint(self._sequence),
            SLC_FNC_READ,
            Pack.usint(PCCC_DATA_SIZE[_tag["file_type"]] * _tag["element_count"]),
            Pack.usint(int(_tag["file_number"])),
            PCCC_DATA_TYPE[_tag["file_type"]],
            Pack.usint(int(_tag["element_number"])),
            Pack.usint(int(_tag.get("pos_number", 0)))]
        request = self.new_request("send_unit_data")
        request.add(b''.join(message_request))
        response = request.send()
        self.__log.debug(f"SLC read_tag({tag})")
        status = request_status(response.raw)
        if status is not None:
            return Tag(_tag["tag"], None, _tag["file_type"], status)
        try:
            # fixed: decompiler artifact raw[SLC_REPLY_START[:None]] -> raw[SLC_REPLY_START:]
            return _parse_read_reply(_tag, response.raw[SLC_REPLY_START:])
        except DataError as err:
            self.__log.exception(f'Failed to parse read reply for {_tag["tag"]}')
            return Tag(_tag["tag"], None, _tag["file_type"], str(err))

    @with_forward_open
    def write(self, *address_values: Tuple[str, TagValueType]) -> ReadWriteReturnType:
        """
        Write values to data file addresses.  To write to multiple words in a file use curly braces in the address
        to indicate the number of words, then set the value to a list of values to write e.g. ``('N120:10{10}', [1, 2, ...])``.

        Does not track request/response size like the CLXDriver.

        :param address_values: one or many 2-element tuples of (address, value)
        :return: a single or list of ``Tag`` objects
        """
        results = [self._write_tag(tag, value) for tag, value in address_values]
        if len(results) == 1:
            return results[0]
        return results

    def _write_tag(self, tag: str, value: TagValueType) -> Tag:
        """ write tag from a connected plc
        Possible combination can be passed to this method:
            c.write_tag('N7:0', [-30, 32767, -32767])
            c.write_tag('N7:0', 21)
            c.read_tag('N7:0', 10)
        It is not possible to write status bit
        :return: None is returned in case of error
        """
        _tag = parse_tag(tag)
        if _tag is None:
            raise RequestError(f"Error parsing the tag passed to write() - {tag}")
        _tag["data_size"] = PCCC_DATA_SIZE[_tag["file_type"]]
        message_request = [
            b'K',
            b'\x02',
            CLASS_TYPE["8-bit"],
            PCCC_PATH,
            b'\x07',
            self._cfg["vid"],
            self._cfg["vsn"],
            SLC_CMD_CODE,
            b'\x00',
            Pack.uint(self._sequence),
            SLC_FNC_WRITE,
            Pack.usint(_tag["data_size"] * _tag["element_count"]),
            Pack.usint(int(_tag["file_number"])),
            PCCC_DATA_TYPE[_tag["file_type"]],
            Pack.usint(int(_tag["element_number"])),
            Pack.usint(int(_tag.get("pos_number", 0))),
            writeable_value(_tag, value)]
        request = self.new_request("send_unit_data")
        request.add(b''.join(message_request))
        response = request.send()
        status = request_status(response.raw)
        if status is not None:
            return Tag(_tag["tag"], None, _tag["file_type"], status)
        return Tag(_tag["tag"], value, _tag["file_type"], None)
+
+
def _parse_read_reply(tag, data) -> Tag:
    """Decode an SLC read reply into a ``Tag``.

    Fixed: broken decompiled slices restored and the ``err = None; del err``
    exception-cleanup artifacts removed (chaining via ``from err`` is kept).
    """
    try:
        bit_read = tag.get("address_field", 0) == 3
        bit_position = int(tag.get("sub_element") or 0)
        data_size = PCCC_DATA_SIZE[tag["file_type"]]
        unpack_func = Unpack[f'pccc_{tag["file_type"].lower()}']
        if bit_read:
            new_value = 0
            if tag["file_type"] in ('T', 'C'):
                # timers/counters expose PRE and ACC as fixed offsets in the reply
                if bit_position == PCCC_CT["PRE"]:
                    return Tag(tag["tag"], unpack_func(data[new_value + 2:new_value + 2 + data_size]), tag["file_type"], None)
                if bit_position == PCCC_CT["ACC"]:
                    return Tag(tag["tag"], unpack_func(data[new_value + 4:new_value + 4 + data_size]), tag["file_type"], None)
            tag_value = unpack_func(data[new_value:new_value + data_size])
            return Tag(tag["tag"], get_bit(tag_value, bit_position), tag["file_type"], None)
        values_list = [unpack_func(data[i:i + data_size]) for i in range(0, len(data), data_size)]
        if len(values_list) > 1:
            return Tag(tag["tag"], values_list, tag["file_type"], None)
        return Tag(tag["tag"], values_list[0], tag["file_type"], None)
    except Exception as err:
        raise DataError("Failed parsing tag read reply") from err
+
+
def parse_tag(tag: str) -> Optional[dict]:
    """Parse an SLC/MicroLogix data-file address into its components.

    Returns a dict (file_type, file_number, element_number, ...) or None when
    the address cannot be parsed.

    NOTE(review): the named groups ``(?P<...>)`` below were stripped from the
    decompiled source (all ``<...>`` spans were lost); they are reconstructed
    here from the ``t.group(...)`` accesses in each branch -- verify against
    the upstream pycomm3 sources.
    """
    # Timer/Counter with a mnemonic sub-element, e.g. T4:0.ACC
    t = re.search(r"(?P<file_type>[CT])(?P<file_number>\d{1,3})(:)(?P<element_number>\d{1,3})"
                  r"(.)(?P<sub_element>ACC|PRE|EN|DN|TT|CU|CD|DN|OV|UN|UA)",
                  tag, flags=re.IGNORECASE)
    if t:
        if 1 <= int(t.group("file_number")) <= 255 and 0 <= int(t.group("element_number")) <= 255:
            return {'file_type': t.group("file_type").upper(),
                    'file_number': t.group("file_number"),
                    'element_number': t.group("element_number"),
                    'sub_element': PCCC_CT[t.group("sub_element").upper()],
                    'address_field': 3,
                    'element_count': 1,
                    'tag': t.group(0)}
    # L/F/B/N data files, e.g. N7:0, B3:1/2, N120:10{10}
    t = re.search(r"(?P<file_type>[LFBN])(?P<file_number>\d{1,3})(:)(?P<element_number>\d{1,3})"
                  r"(/(?P<sub_element>\d{1,2}))?(?P<_elem_cnt_token>{(?P<element_count>\d+)})?",
                  tag, flags=re.IGNORECASE)
    if t:
        _cnt = t.group("_elem_cnt_token")
        tag_name = t.group(0).replace(_cnt, "") if _cnt else t.group(0)
        if t.group("sub_element") is not None:
            if (1 <= int(t.group("file_number")) <= 255
                    and 0 <= int(t.group("element_number")) <= 255
                    and 0 <= int(t.group("sub_element")) <= 15):
                element_count = t.group("element_count")
                return {'file_type': t.group("file_type").upper(),
                        'file_number': t.group("file_number"),
                        'element_number': t.group("element_number"),
                        'sub_element': t.group("sub_element"),
                        'address_field': 3,
                        'element_count': int(element_count) if element_count is not None else 1,
                        'tag': tag_name}
        elif 1 <= int(t.group("file_number")) <= 255 and 0 <= int(t.group("element_number")) <= 255:
            element_count = t.group("element_count")
            return {'file_type': t.group("file_type").upper(),
                    'file_number': t.group("file_number"),
                    'element_number': t.group("element_number"),
                    'sub_element': t.group("sub_element"),
                    'address_field': 2,
                    'element_count': int(element_count) if element_count is not None else 1,
                    'tag': tag_name}
    # Input/Output files, e.g. I:1.0/2
    t = re.search(r"(?P<file_type>[IO])(:)(?P<element_number>\d{1,3})(.)(?P<position_number>\d{1,3})"
                  r"(/(?P<sub_element>\d{1,2}))?(?P<_elem_cnt_token>{(?P<element_count>\d+)})?",
                  tag, flags=re.IGNORECASE)
    if t:
        _cnt = t.group("_elem_cnt_token")
        tag_name = t.group(0).replace(_cnt, "") if _cnt else t.group(0)
        if t.group("sub_element") is not None:
            # NOTE(review): the decompiled code checked a 'file_number' group here,
            # but this pattern has no such group; element/position bounds are
            # checked instead -- confirm against upstream.
            if (0 <= int(t.group("element_number")) <= 255
                    and 0 <= int(t.group("position_number")) <= 255
                    and 0 <= int(t.group("sub_element")) <= 15):
                element_count = t.group("element_count")
                return {'file_type': t.group("file_type").upper(),
                        'file_number': "0",
                        'element_number': t.group("element_number"),
                        'pos_number': t.group("position_number"),
                        'sub_element': t.group("sub_element"),
                        'address_field': 3,
                        'element_count': int(element_count) if element_count is not None else 1,
                        'tag': tag_name}
        elif 0 <= int(t.group("element_number")) <= 255:
            element_count = t.group("element_count")
            return {'file_type': t.group("file_type").upper(),
                    'file_number': "0",
                    'element_number': t.group("element_number"),
                    'pos_number': t.group("position_number"),
                    'address_field': 2,
                    'element_count': int(element_count) if element_count is not None else 1,
                    'tag': tag_name}
    # Status file, e.g. S:1/15
    t = re.search(r"(?P<file_type>S)(:)(?P<element_number>\d{1,3})"
                  r"(/(?P<sub_element>\d{1,2}))?(?P<_elem_cnt_token>{(?P<element_count>\d+)})?",
                  tag, flags=re.IGNORECASE)
    if t:
        _cnt = t.group("_elem_cnt_token")
        tag_name = t.group(0).replace(_cnt, "") if _cnt else t.group(0)
        element_count = t.group("element_count")
        if t.group("sub_element") is not None:
            if 0 <= int(t.group("element_number")) <= 255 and 0 <= int(t.group("sub_element")) <= 15:
                return {'file_type': t.group("file_type").upper(),
                        'file_number': "2",
                        'element_number': t.group("element_number"),
                        'sub_element': t.group("sub_element"),
                        'address_field': 3,
                        'element_count': int(element_count) if element_count is not None else 1,
                        'tag': t.group(0)}
        elif 0 <= int(t.group("element_number")) <= 255:
            return {'file_type': t.group("file_type").upper(),
                    'file_number': "2",
                    'element_number': t.group("element_number"),
                    'address_field': 2,
                    'element_count': int(element_count) if element_count is not None else 1,
                    'tag': tag_name}
    # Bit file addressed by absolute bit, e.g. B3/17
    t = re.search(r"(?P<file_type>B)(?P<file_number>\d{1,3})(/)(?P<element_number>\d{1,4})"
                  r"(?P<_elem_cnt_token>{(?P<element_count>\d+)})?",
                  tag, flags=re.IGNORECASE)
    if t:
        if 1 <= int(t.group("file_number")) <= 255 and 0 <= int(t.group("element_number")) <= 4095:
            _cnt = t.group("_elem_cnt_token")
            tag_name = t.group(0).replace(_cnt, "") if _cnt else t.group(0)
            bit_position = int(t.group("element_number"))
            # fixed: integer division (the decompiled '/' produced a float
            # element number and a sub_element of 0.0 for every bit >= 16)
            element_number = bit_position // 16
            sub_element = bit_position - element_number * 16
            element_count = t.group("element_count")
            return {'file_type': t.group("file_type").upper(),
                    'file_number': t.group("file_number"),
                    'element_number': element_number,
                    'sub_element': sub_element,
                    'address_field': 3,
                    'element_count': int(element_count) if element_count is not None else 1,
                    'tag': tag_name}
+
+
def get_bit(value: int, idx: int) -> bool:
    """:returns value of bit at position idx"""
    mask = 1 << idx
    return (value & mask) != 0
+
+
def writeable_value(tag: dict, value: "Union[bytes, TagValueType]") -> bytes:
    """Encode *value* into the bytes payload for a PCCC write request.

    Fixed: the decompiled version had the element-count validation dead-coded
    after an early ``return`` and left ``bit_field``/``element_count`` unbound
    on the non-bytes path; control flow restored. (Annotation quoted so the
    alias need not be defined at definition time.)
    """
    if isinstance(value, bytes):
        # caller already encoded the payload
        return value
    bit_field = tag.get("address_field", 0) == 3
    bit_position = int(tag.get("sub_element") or 0) if bit_field else 0
    element_count = tag.get("element_count") or 1
    if element_count > 1:
        if len(value) < element_count:
            raise RequestError(f"Insufficient data for requested elements, expected {element_count} and got {len(value)}")
        if len(value) > element_count:
            value = value[:element_count]
    try:
        pack_func = Pack[f'pccc_{tag["file_type"].lower()}']
        if element_count > 1:
            _value = b''.join(pack_func(val) for val in value)
        elif bit_field:
            tag["data_size"] = 2
            if tag["file_type"] in ('T', 'C') and bit_position in {PCCC_CT["PRE"], PCCC_CT["ACC"]}:
                _value = b'\xff\xff' + pack_func(value)
            elif value > 0:
                # mask word + value word both select the target bit (set the bit)
                _value = Pack.uint(math.pow(2, bit_position)) + Pack.uint(math.pow(2, bit_position))
            else:
                # mask word selects the bit, value word clears it
                _value = Pack.uint(math.pow(2, bit_position)) + Pack.uint(0)
        else:
            _value = pack_func(value)
    except Exception as err:
        raise RequestError(f'Failed to create a writeable value for {tag["tag"]} from {value}') from err
    return _value
+
+
def request_status(data) -> Optional[str]:
    """Map the status byte of a PCCC reply to an error string (None = success)."""
    try:
        code = int(data[58])
        return None if code == SUCCESS else PCCC_ERROR_CODE.get(code, "Unknown Status")
    except Exception:
        # short/garbled replies fall through to a generic status
        return "Unknown Status"
diff --git a/APPS_UNCOMPILED/lib/pycomm3/socket_.py b/APPS_UNCOMPILED/lib/pycomm3/socket_.py
new file mode 100644
index 0000000..32b78f0
--- /dev/null
+++ b/APPS_UNCOMPILED/lib/pycomm3/socket_.py
@@ -0,0 +1,63 @@
+# uncompyle6 version 3.9.2
+# Python bytecode version base 3.7.0 (3394)
+# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ]
+# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pycomm3/socket_.py
+# Compiled at: 2024-04-18 03:12:57
+# Size of source mod 2**32: 2788 bytes
+import logging, socket, struct
+from .exceptions import CommError
+from .const import HEADER_SIZE
+
+class Socket:
+    _Socket__log = logging.getLogger(f"{__module__}.{__qualname__}")
+
+    def __init__(self, timeout=5.0):
+        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        self.sock.settimeout(timeout)
+        self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+
+    def connect(self, host, port):
+        try:
+            self.sock.connect((host, port))
+        except socket.timeout:
+            raise CommError("Socket timeout during connection.")
+
+    def send(self, msg, timeout=0):
+        if timeout != 0:
+            self.sock.settimeout(timeout)
+        total_sent = 0
+        while total_sent < len(msg):
+            try:
+                sent = self.sock.send(msg[total_sent[:None]])
+                if sent == 0:
+                    raise CommError("socket connection broken.")
+                total_sent += sent
+            except socket.error as err:
+                try:
+                    raise CommError("socket connection broken.") from err
+                finally:
+                    err = None
+                    del err
+
+        return total_sent
+
+    def receive(self, timeout=0):
+        try:
+            if timeout != 0:
+                self.sock.settimeout(timeout)
+            data = self.sock.recv(4096)
+            data_len = struct.unpack_from(", !"``), built up using :class:`Word`,
+:class:`Literal`, and :class:`And` elements
+(the :class:`'+'` operators create :class:`And` expressions,
+and the strings are auto-converted to :class:`Literal` expressions)::
+
+    from pyparsing import Word, alphas
+
+    # define grammar of a greeting
+    greet = Word(alphas) + "," + Word(alphas) + "!"
+
+    hello = "Hello, World!"
+    print (hello, "->", greet.parseString(hello))
+
+The program outputs the following::
+
+    Hello, World! -> ['Hello', ',', 'World', '!']
+
+The Python representation of the grammar is quite readable, owing to the
+self-explanatory class names, and the use of '+', '|' and '^' operators.
+
+The :class:`ParseResults` object returned from
+:class:`ParserElement.parseString` can be
+accessed as a nested list, a dictionary, or an object with named
+attributes.
+
+The pyparsing module handles some of the problems that are typically
+vexing when writing text parsers:
+
+  - extra or missing whitespace (the above program will also handle
+    "Hello,World!", "Hello  ,  World  !", etc.)
+  - quoted strings
+  - embedded comments
+
+Getting Started -
+-----------------
+Visit the classes :class:`ParserElement` and :class:`ParseResults` to
+see the base classes that most other pyparsing
+classes inherit from. Use the docstrings for examples of how to:
+
+ - construct literal match expressions from :class:`Literal` and
+   :class:`CaselessLiteral` classes
+ - construct character word-group expressions using the :class:`Word`
+   class
+ - see how to create repetitive expressions using :class:`ZeroOrMore`
+   and :class:`OneOrMore` classes
+ - use :class:`'+'`, :class:`'|'`, :class:`'^'`,
+   and :class:`'&'` operators to combine simple expressions into
+   more complex ones
+ - associate names with your parsed results using
+   :class:`ParserElement.setResultsName`
+ - access the parsed data, which is returned as a :class:`ParseResults`
+   object
+ - find some helpful expression short-cuts like :class:`delimitedList`
+   and :class:`oneOf`
+ - find more useful common expressions in the :class:`pyparsing_common`
+   namespace class
+"""
+__version__ = "2.4.7"
+__versionTime__ = "30 Mar 2020 00:43 UTC"
+__author__ = "Paul McGuire "
+import string
+from weakref import ref as wkref
+import copy, sys, warnings, re, sre_constants, collections, pprint, traceback, types
+from datetime import datetime
+from operator import itemgetter
+import itertools
+from functools import wraps
+from contextlib import contextmanager
+try:
+    from itertools import filterfalse
+except ImportError:
+    from itertools import ifilterfalse as filterfalse
+
+try:
+    from _thread import RLock
+except ImportError:
+    from threading import RLock
+
+try:
+    from collections.abc import Iterable
+    from collections.abc import MutableMapping, Mapping
+except ImportError:
+    from collections import Iterable
+    from collections import MutableMapping, Mapping
+
+try:
+    from collections import OrderedDict as _OrderedDict
+except ImportError:
+    try:
+        from ordereddict import OrderedDict as _OrderedDict
+    except ImportError:
+        _OrderedDict = None
+
+try:
+    from types import SimpleNamespace
+except ImportError:
+
+    class SimpleNamespace:
+        pass
+
+
+__compat__ = SimpleNamespace()
+__compat__.__doc__ = "\n    A cross-version compatibility configuration for pyparsing features that will be\n    released in a future version. By setting values in this configuration to True,\n    those features can be enabled in prior versions for compatibility development\n    and testing.\n\n     - collect_all_And_tokens - flag to enable fix for Issue #63 that fixes erroneous grouping\n       of results names when an And expression is nested within an Or or MatchFirst; set to\n       True to enable bugfix released in pyparsing 2.3.0, or False to preserve\n       pre-2.3.0 handling of named results\n"
+__compat__.collect_all_And_tokens = True
+__diag__ = SimpleNamespace()
+__diag__.__doc__ = "\nDiagnostic configuration (all default to False)\n     - warn_multiple_tokens_in_named_alternation - flag to enable warnings when a results\n       name is defined on a MatchFirst or Or expression with one or more And subexpressions\n       (only warns if __compat__.collect_all_And_tokens is False)\n     - warn_ungrouped_named_tokens_in_collection - flag to enable warnings when a results\n       name is defined on a containing expression with ungrouped subexpressions that also\n       have results names\n     - warn_name_set_on_empty_Forward - flag to enable warnings whan a Forward is defined\n       with a results name, but has no contents defined\n     - warn_on_multiple_string_args_to_oneof - flag to enable warnings whan oneOf is\n       incorrectly called with multiple str arguments\n     - enable_debug_on_named_expressions - flag to auto-enable debug on all subsequent\n       calls to ParserElement.setName()\n"
+__diag__.warn_multiple_tokens_in_named_alternation = False
+__diag__.warn_ungrouped_named_tokens_in_collection = False
+__diag__.warn_name_set_on_empty_Forward = False
+__diag__.warn_on_multiple_string_args_to_oneof = False
+__diag__.enable_debug_on_named_expressions = False
+__diag__._all_names = [nm for nm in vars(__diag__) if not nm.startswith("enable_") if nm.startswith("warn_")]
+
def _enable_all_warnings():
    """Turn on every ``warn_*`` diagnostic flag on ``__diag__``."""
    for flag in ("warn_multiple_tokens_in_named_alternation",
                 "warn_ungrouped_named_tokens_in_collection",
                 "warn_name_set_on_empty_Forward",
                 "warn_on_multiple_string_args_to_oneof"):
        setattr(__diag__, flag, True)
+
+
+__diag__.enable_all_warnings = _enable_all_warnings
+__all__ = [
+ '__version__', '__versionTime__', '__author__', '__compat__', '__diag__', 
+ 'And', 
+ 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 
+ 'Empty', 
+ 'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 
+ 'LineStart', 'Literal', 
+ 'PrecededBy', 'MatchFirst', 'NoMatch', 'NotAny', 
+ 'OneOrMore', 'OnlyOnce', 'Optional', 'Or', 
+ 'ParseBaseException', 'ParseElementEnhance', 
+ 'ParseException', 'ParseExpression', 'ParseFatalException', 
+ 'ParseResults', 
+ 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException', 
+ 'Regex', 
+ 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', 
+ 'White', 
+ 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', 'Char', 
+ 'alphanums', 'alphas', 
+ 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col', 
+ 'commaSeparatedList', 
+ 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString', 
+ 'dblSlashComment', 
+ 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums', 
+ 'htmlComment', 
+ 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno', 
+ 'makeHTMLTags', 
+ 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral', 
+ 'nestedExpr', 
+ 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables', 
+ 'punc8bit', 
+ 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', 
+ 'replaceWith', 
+ 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd', 
+ 'stringStart', 
+ 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute', 
+ 'indentedBlock', 
+ 'originalTextFor', 'ungroup', 'infixNotation', 'locatedExpr', 'withClass', 
+ 'CloseMatch', 
+ 'tokenMap', 'pyparsing_common', 'pyparsing_unicode', 'unicode_set', 
+ 'conditionAsParseAction', 
+ 're']
# Python 2/3 compatibility shims.
# Fixed: decompiled slice artifacts tuple(...)[None[:3]] -> [:3] and the
# slices inside _ustr's parse action restored.
system_version = tuple(sys.version_info)[:3]
PY_3 = system_version[0] == 3
if PY_3:
    _MAX_INT = sys.maxsize
    basestring = str
    unichr = chr
    unicode = str
    _ustr = str
    singleArgBuiltins = [
        sum, len, sorted, reversed, list, tuple, set, any, all, min,
        max]
else:
    _MAX_INT = sys.maxint
    range = xrange

    def _ustr(obj):
        """Drop-in replacement for str(obj) that tries to be Unicode
        friendly. It first tries str(obj). If that fails with
        a UnicodeEncodeError, then it tries unicode(obj). It then
        < returns the unicode object | encodes it with the default
        encoding | ... >.
        """
        if isinstance(obj, unicode):
            return obj
        try:
            return str(obj)
        except UnicodeEncodeError:
            ret = unicode(obj).encode(sys.getdefaultencoding(), "xmlcharrefreplace")
            xmlcharref = Regex("&#\\d+;")
            xmlcharref.setParseAction(lambda t: "\\u" + hex(int(t[0][2:-1]))[2:])
            return xmlcharref.transformString(ret)

    singleArgBuiltins = []
    import __builtin__
    for fname in "sum len sorted reversed list tuple set any all min max".split():
        try:
            singleArgBuiltins.append(getattr(__builtin__, fname))
        except AttributeError:
            continue

_generatorType = type((y for y in range(1)))
+
+def _xml_escape(data):
+    """Escape &, <, >, ", ', etc. in a string of data."""
+    from_symbols = '&><"\''
+    to_symbols = ("&" + s + ";" for s in "amp gt lt quot apos".split())
+    for from_, to_ in zip(from_symbols, to_symbols):
+        data = data.replace(from_, to_)
+
+    return data
+
+
# Character-class constants used to build Word() expressions and friends.
alphas = string.ascii_uppercase + string.ascii_lowercase
nums = "0123456789"
hexnums = nums + "ABCDEFabcdef"  # valid hexadecimal digits
alphanums = alphas + nums
_bslash = chr(92)  # a single backslash, built via chr() to avoid escaping noise
# all printable ASCII characters except whitespace
printables = "".join((c for c in string.printable if c not in string.whitespace))
+
def conditionAsParseAction(fn, message=None, fatal=False):
    """Wrap predicate *fn* as a parse action that raises when it returns False.

    Raises ParseFatalException when *fatal* is true, else ParseException,
    with *message* (default "failed user-defined condition") as the text.
    """
    if message is None:
        message = "failed user-defined condition"
    exc_type = ParseFatalException if fatal else ParseException
    trimmed = _trim_arity(fn)

    @wraps(fn)
    def pa(s, l, t):
        if not bool(trimmed(s, l, t)):
            raise exc_type(s, l, message)

    return pa
+
+
class ParseBaseException(Exception):
    __doc__ = "base exception class for all parsing runtime exceptions"

    def __init__(self, pstr, loc=0, msg=None, elem=None):
        # Single-argument form: treat pstr as the message itself.
        self.loc = loc
        if msg is None:
            self.msg = pstr
            self.pstr = ""
        else:
            self.msg = msg
            self.pstr = pstr
        self.parserElement = elem
        self.args = (pstr, loc, msg)

    @classmethod
    def _from_exception(cls, pe):
        """
        internal factory method to simplify creating one type of ParseException
        from another - avoids having __init__ signature conflicts among subclasses
        """
        return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement)

    def __getattr__(self, aname):
        """supported attributes by name are:
           - lineno - returns the line number of the exception text
           - col - returns the column number of the exception text
           - line - returns the line containing the exception text
        """
        if aname == "lineno":
            return lineno(self.loc, self.pstr)
        if aname in ('col', 'column'):
            return col(self.loc, self.pstr)
        if aname == "line":
            return line(self.loc, self.pstr)
        raise AttributeError(aname)

    def __str__(self):
        if self.pstr:
            if self.loc >= len(self.pstr):
                foundstr = ", found end of text"
            else:
                # show the single character at the failure position
                # (was `self.pstr[self.loc[:self.loc + 1]]` -- decompile artifact)
                foundstr = (", found %r" % self.pstr[self.loc:self.loc + 1]).replace("\\\\", "\\")
        else:
            foundstr = ""
        return "%s%s  (at char %d), (line:%d, col:%d)" % (
         self.msg, foundstr, self.loc, self.lineno, self.column)

    def __repr__(self):
        return _ustr(self)

    def markInputline(self, markerString='>!<'):
        """Extracts the exception line from the input string, and marks
           the location of the exception with a special symbol.
        """
        line_str = self.line
        line_column = self.column - 1
        if markerString:
            # splice the marker in at the failure column
            # (was `line_str[None[:line_column]]` etc. -- decompile artifacts)
            line_str = "".join((line_str[:line_column],
             markerString, line_str[line_column:]))
        return line_str.strip()

    def __dir__(self):
        return "lineno col line".split() + dir(type(self))
+
+
class ParseException(ParseBaseException):
    __doc__ = '\n    Exception thrown when parse expressions don\'t match class;\n    supported attributes by name are:\n    - lineno - returns the line number of the exception text\n    - col - returns the column number of the exception text\n    - line - returns the line containing the exception text\n\n    Example::\n\n        try:\n            Word(nums).setName("integer").parseString("ABC")\n        except ParseException as pe:\n            print(pe)\n            print("column: {}".format(pe.col))\n\n    prints::\n\n       Expected integer (at char 0), (line:1, col:1)\n        column: 1\n\n    '

    @staticmethod
    def explain(exc, depth=16):
        """
        Method to take an exception and translate the Python internal traceback into a list
        of the pyparsing expressions that caused the exception to be raised.

        Parameters:

         - exc - exception raised during parsing (need not be a ParseException, in support
           of Python exceptions that might be raised in a parse action)
         - depth (default=16) - number of levels back in the stack trace to list expression
           and function names; if None, the full stack trace names will be listed; if 0, only
           the failing input line, marker, and exception string will be shown

        Returns a multi-line string listing the ParserElements and/or function names in the
        exception's stack trace.

        Note: the diagnostic output will include string representations of the expressions
        that failed to parse. These representations will be more helpful if you use `setName` to
        give identifiable names to your expressions. Otherwise they will use the default string
        forms, which may be cryptic to read.

        explain() is only supported under Python 3.
        """
        import inspect
        if depth is None:
            depth = sys.getrecursionlimit()
        ret = []
        if isinstance(exc, ParseBaseException):
            ret.append(exc.line)
            ret.append(" " * (exc.col - 1) + "^")
        ret.append("{0}: {1}".format(type(exc).__name__, exc))
        if depth > 0:
            callers = inspect.getinnerframes((exc.__traceback__), context=depth)
            seen = set()
            # walk the innermost `depth` frames
            # (was `callers[(-depth)[:None]]` -- decompile artifact for [-depth:])
            for i, ff in enumerate(callers[-depth:]):
                frm = ff[0]
                f_self = frm.f_locals.get("self", None)
                if isinstance(f_self, ParserElement):
                    # only report the public parse entry points of each element once
                    if frm.f_code.co_name not in ('parseImpl', '_parseNoCache'):
                        continue
                    if f_self in seen:
                        continue
                    seen.add(f_self)
                    self_type = type(f_self)
                    ret.append("{0}.{1} - {2}".format(self_type.__module__, self_type.__name__, f_self))
                else:
                    if f_self is not None:
                        self_type = type(f_self)
                        ret.append("{0}.{1}".format(self_type.__module__, self_type.__name__))
                    else:
                        code = frm.f_code
                        # skip decorator wrappers and module-level frames
                        # ('<module>' lost by the decompiler -- restored per upstream)
                        if code.co_name in ('wrapper', '<module>'):
                            continue
                        ret.append("{0}".format(code.co_name))
                depth -= 1
                if not depth:
                    break

        return "\n".join(ret)
+
+
class ParseFatalException(ParseBaseException):
    """user-throwable exception thrown when inconsistent parse content
       is found; stops all parsing immediately"""
+
+
class ParseSyntaxException(ParseFatalException):
    """just like :class:`ParseFatalException`, but thrown internally
    when an :class:`ErrorStop` ('-' operator) indicates
    that parsing is to stop immediately because an unbacktrackable
    syntax error has been found.
    """
+
+
class RecursiveGrammarException(Exception):
    """exception thrown by :class:`ParserElement.validate` if the
    grammar could be improperly recursive
    """

    def __init__(self, parseElementList):
        # keep the offending chain of parser elements for display
        self.parseElementTrace = parseElementList

    def __str__(self):
        return "RecursiveGrammarException: %s" % self.parseElementTrace
+
+
+class _ParseResultsWithOffset(object):
+
+    def __init__(self, p1, p2):
+        self.tup = (
+         p1, p2)
+
+    def __getitem__(self, i):
+        return self.tup[i]
+
+    def __repr__(self):
+        return repr(self.tup[0])
+
+    def setOffset(self, i):
+        self.tup = (self.tup[0], i)
+
+
+class ParseResults(object):
+    __doc__ = 'Structured parse results, to provide multiple means of access to\n    the parsed data:\n\n       - as a list (``len(results)``)\n       - by list index (``results[0], results[1]``, etc.)\n       - by attribute (``results.`` - see :class:`ParserElement.setResultsName`)\n\n    Example::\n\n        integer = Word(nums)\n        date_str = (integer.setResultsName("year") + \'/\'\n                        + integer.setResultsName("month") + \'/\'\n                        + integer.setResultsName("day"))\n        # equivalent form:\n        # date_str = integer("year") + \'/\' + integer("month") + \'/\' + integer("day")\n\n        # parseString returns a ParseResults object\n        result = date_str.parseString("1999/12/31")\n\n        def test(s, fn=repr):\n            print("%s -> %s" % (s, fn(eval(s))))\n        test("list(result)")\n        test("result[0]")\n        test("result[\'month\']")\n        test("result.day")\n        test("\'month\' in result")\n        test("\'minutes\' in result")\n        test("result.dump()", str)\n\n    prints::\n\n        list(result) -> [\'1999\', \'/\', \'12\', \'/\', \'31\']\n        result[0] -> \'1999\'\n        result[\'month\'] -> \'12\'\n        result.day -> \'31\'\n        \'month\' in result -> True\n        \'minutes\' in result -> False\n        result.dump() -> [\'1999\', \'/\', \'12\', \'/\', \'31\']\n        - day: 31\n        - month: 12\n        - year: 1999\n    '
+
+    def __new__(cls, toklist=None, name=None, asList=True, modal=True):
+        if isinstance(toklist, cls):
+            return toklist
+        retobj = object.__new__(cls)
+        retobj._ParseResults__doinit = True
+        return retobj
+
+    def __init__(self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance):
+        if self._ParseResults__doinit:
+            self._ParseResults__doinit = False
+            self._ParseResults__name = None
+            self._ParseResults__parent = None
+            self._ParseResults__accumNames = {}
+            self._ParseResults__asList = asList
+            self._ParseResults__modal = modal
+            if toklist is None:
+                toklist = []
+            elif isinstance(toklist, list):
+                self._ParseResults__toklist = toklist[None[:None]]
+            else:
+                if isinstance(toklist, _generatorType):
+                    self._ParseResults__toklist = list(toklist)
+                else:
+                    self._ParseResults__toklist = [
+                     toklist]
+            self._ParseResults__tokdict = dict()
+        if name is not None and name:
+            if not modal:
+                self._ParseResults__accumNames[name] = 0
+            elif isinstance(name, int):
+                name = _ustr(name)
+            self._ParseResults__name = name
+            if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None, "", [])):
+                if isinstance(toklist, basestring):
+                    toklist = [
+                     toklist]
+                if asList:
+                    if isinstance(toklist, ParseResults):
+                        self[name] = _ParseResultsWithOffset(ParseResults(toklist._ParseResults__toklist), 0)
+            else:
+                self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]), 0)
+            self[name]._ParseResults__name = name
+        else:
+            try:
+                self[name] = toklist[0]
+            except (KeyError, TypeError, IndexError):
+                self[name] = toklist
+
+    def __getitem__(self, i):
+        if isinstance(i, (int, slice)):
+            return self._ParseResults__toklist[i]
+        if i not in self._ParseResults__accumNames:
+            return self._ParseResults__tokdict[i][-1][0]
+        return ParseResults([v[0] for v in self._ParseResults__tokdict[i]])
+
+    def __setitem__(self, k, v, isinstance=isinstance):
+        if isinstance(v, _ParseResultsWithOffset):
+            self._ParseResults__tokdict[k] = self._ParseResults__tokdict.get(k, list()) + [v]
+            sub = v[0]
+        else:
+            if isinstance(k, (int, slice)):
+                self._ParseResults__toklist[k] = v
+                sub = v
+            else:
+                self._ParseResults__tokdict[k] = self._ParseResults__tokdict.get(k, list()) + [_ParseResultsWithOffset(v, 0)]
+                sub = v
+        if isinstance(sub, ParseResults):
+            sub._ParseResults__parent = wkref(self)
+
    def __delitem__(self, i):
        # Delete by position/slice (list semantics) or by results name (dict semantics).
        if isinstance(i, (int, slice)):
            mylen = len(self._ParseResults__toklist)
            del self._ParseResults__toklist[i]
            if isinstance(i, int):
                # normalize a negative index, then treat it as a one-item slice
                if i < 0:
                    i += mylen
                i = slice(i, i + 1)
            removed = list(range(*i.indices(mylen)))
            removed.reverse()
            # shift stored results-name offsets left past each removed position
            for name, occurrences in self._ParseResults__tokdict.items():
                for j in removed:
                    for k, (value, position) in enumerate(occurrences):
                        occurrences[k] = _ParseResultsWithOffset(value, position - (position > j))

        else:
            del self._ParseResults__tokdict[i]
+
+    def __contains__(self, k):
+        return k in self._ParseResults__tokdict
+
+    def __len__(self):
+        return len(self._ParseResults__toklist)
+
+    def __bool__(self):
+        return not not self._ParseResults__toklist
+
+    __nonzero__ = __bool__
+
+    def __iter__(self):
+        return iter(self._ParseResults__toklist)
+
+    def __reversed__(self):
+        return iter(self._ParseResults__toklist[None[None:-1]])
+
    def _iterkeys(self):
        # Iterate results names; prefer dict.iterkeys() when it exists (Python 2).
        if hasattr(self._ParseResults__tokdict, "iterkeys"):
            return self._ParseResults__tokdict.iterkeys()
        return iter(self._ParseResults__tokdict)

    def _itervalues(self):
        # Iterate named result values, in key order.
        return (self[k] for k in self._iterkeys())

    def _iteritems(self):
        # Iterate (name, value) pairs, in key order.
        return ((k, self[k]) for k in self._iterkeys())

    # Bind the public iteration API at class-creation time:
    # Python 3 exposes iterator-style keys()/values()/items(); Python 2
    # additionally keeps the iter* spellings and list-returning methods.
    if PY_3:
        keys = _iterkeys
        values = _itervalues
        items = _iteritems
    else:
        iterkeys = _iterkeys
        itervalues = _itervalues
        iteritems = _iteritems

        def keys(self):
            """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x)."""
            return list(self.iterkeys())

        def values(self):
            """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x)."""
            return list(self.itervalues())

        def items(self):
            """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x)."""
            return list(self.iteritems())
+
+    def haskeys(self):
+        """Since keys() returns an iterator, this method is helpful in bypassing
+           code that looks for the existence of any defined results names."""
+        return bool(self._ParseResults__tokdict)
+
+    def pop(self, *args, **kwargs):
+        """
+        Removes and returns item at specified index (default= ``last``).
+        Supports both ``list`` and ``dict`` semantics for ``pop()``. If
+        passed no argument or an integer argument, it will use ``list``
+        semantics and pop tokens from the list of parsed tokens. If passed
+        a non-integer argument (most likely a string), it will use ``dict``
+        semantics and pop the corresponding value from any defined results
+        names. A second default return value argument is supported, just as in
+        ``dict.pop()``.
+
+        Example::
+
+            def remove_first(tokens):
+                tokens.pop(0)
+            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+            print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321']
+
+            label = Word(alphas)
+            patt = label("LABEL") + OneOrMore(Word(nums))
+            print(patt.parseString("AAB 123 321").dump())
+
+            # Use pop() in a parse action to remove named result (note that corresponding value is not
+            # removed from list form of results)
+            def remove_LABEL(tokens):
+                tokens.pop("LABEL")
+                return tokens
+            patt.addParseAction(remove_LABEL)
+            print(patt.parseString("AAB 123 321").dump())
+
+        prints::
+
+            ['AAB', '123', '321']
+            - LABEL: AAB
+
+            ['AAB', '123', '321']
+        """
+        if not args:
+            args = [
+             -1]
+        for k, v in kwargs.items():
+            if k == "default":
+                args = (
+                 args[0], v)
+            else:
+                raise TypeError("pop() got an unexpected keyword argument '%s'" % k)
+
+        if isinstance(args[0], int) or len(args) == 1 or args[0] in self:
+            index = args[0]
+            ret = self[index]
+            del self[index]
+            return ret
+        defaultvalue = args[1]
+        return defaultvalue
+
+    def get(self, key, defaultValue=None):
+        """
+        Returns named result matching the given key, or if there is no
+        such name, then returns the given ``defaultValue`` or ``None`` if no
+        ``defaultValue`` is specified.
+
+        Similar to ``dict.get()``.
+
+        Example::
+
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+            result = date_str.parseString("1999/12/31")
+            print(result.get("year")) # -> '1999'
+            print(result.get("hour", "not specified")) # -> 'not specified'
+            print(result.get("hour")) # -> None
+        """
+        if key in self:
+            return self[key]
+        return defaultValue
+
+    def insert(self, index, insStr):
+        """
+        Inserts new element at location index in the list of parsed tokens.
+
+        Similar to ``list.insert()``.
+
+        Example::
+
+            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+
+            # use a parse action to insert the parse location in the front of the parsed results
+            def insert_locn(locn, tokens):
+                tokens.insert(0, locn)
+            print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321']
+        """
+        self._ParseResults__toklist.insert(index, insStr)
+        for name, occurrences in self._ParseResults__tokdict.items():
+            for k, (value, position) in enumerate(occurrences):
+                occurrences[k] = _ParseResultsWithOffset(value, position + (position > index))
+
+    def append(self, item):
+        """
+        Add single element to end of ParseResults list of elements.
+
+        Example::
+
+            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+
+            # use a parse action to compute the sum of the parsed integers, and add it to the end
+            def append_sum(tokens):
+                tokens.append(sum(map(int, tokens)))
+            print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444]
+        """
+        self._ParseResults__toklist.append(item)
+
+    def extend(self, itemseq):
+        """
+        Add sequence of elements to end of ParseResults list of elements.
+
+        Example::
+
+            patt = OneOrMore(Word(alphas))
+
+            # use a parse action to append the reverse of the matched strings, to make a palindrome
+            def make_palindrome(tokens):
+                tokens.extend(reversed([t[::-1] for t in tokens]))
+                return ''.join(tokens)
+            print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl'
+        """
+        if isinstance(itemseq, ParseResults):
+            self.__iadd__(itemseq)
+        else:
+            self._ParseResults__toklist.extend(itemseq)
+
+    def clear(self):
+        """
+        Clear all elements and results names.
+        """
+        del self._ParseResults__toklist[None[:None]]
+        self._ParseResults__tokdict.clear()
+
+    def __getattr__(self, name):
+        try:
+            return self[name]
+        except KeyError:
+            return ""
+
+    def __add__(self, other):
+        ret = self.copy()
+        ret += other
+        return ret
+
+    def __iadd__(self, other):
+        if other._ParseResults__tokdict:
+            offset = len(self._ParseResults__toklist)
+            addoffset = lambda a:             if a < 0:
+offset # Avoid dead code: a + offset
+            otheritems = other._ParseResults__tokdict.items()
+            otherdictitems = [(k, _ParseResultsWithOffset(v[0], addoffset(v[1]))) for k, vlist in otheritems for v in iter(vlist)]
+            for k, v in otherdictitems:
+                self[k] = v
+                if isinstance(v[0], ParseResults):
+                    v[0]._ParseResults__parent = wkref(self)
+
+        self._ParseResults__toklist += other._ParseResults__toklist
+        self._ParseResults__accumNames.update(other._ParseResults__accumNames)
+        return self
+
+    def __radd__(self, other):
+        if isinstance(other, int):
+            if other == 0:
+                return self.copy()
+        return other + self
+
+    def __repr__(self):
+        return "(%s, %s)" % (repr(self._ParseResults__toklist), repr(self._ParseResults__tokdict))
+
+    def __str__(self):
+        return "[" + ", ".join((_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self._ParseResults__toklist)) + "]"
+
+    def _asStringList(self, sep=''):
+        out = []
+        for item in self._ParseResults__toklist:
+            if out:
+                if sep:
+                    out.append(sep)
+            if isinstance(item, ParseResults):
+                out += item._asStringList()
+            else:
+                out.append(_ustr(item))
+
+        return out
+
+    def asList(self):
+        """
+        Returns the parse results as a nested list of matching tokens, all converted to strings.
+
+        Example::
+
+            patt = OneOrMore(Word(alphas))
+            result = patt.parseString("sldkj lsdkj sldkj")
+            # even though the result prints in string-like form, it is actually a pyparsing ParseResults
+            print(type(result), result) # ->  ['sldkj', 'lsdkj', 'sldkj']
+
+            # Use asList() to create an actual list
+            result_list = result.asList()
+            print(type(result_list), result_list) # ->  ['sldkj', 'lsdkj', 'sldkj']
+        """
+        return [res.asList() if isinstance(res, ParseResults) else res for res in self._ParseResults__toklist]
+
+    def asDict(self):
+        """
+        Returns the named parse results as a nested dictionary.
+
+        Example::
+
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+            result = date_str.parseString('12/31/1999')
+            print(type(result), repr(result)) # ->  (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]})
+
+            result_dict = result.asDict()
+            print(type(result_dict), repr(result_dict)) # ->  {'day': '1999', 'year': '12', 'month': '31'}
+
+            # even though a ParseResults supports dict-like access, sometime you just need to have a dict
+            import json
+            print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable
+            print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"}
+        """
+        if PY_3:
+            item_fn = self.items
+        else:
+            item_fn = self.iteritems
+
+        def toItem(obj):
+            if isinstance(obj, ParseResults):
+                if obj.haskeys():
+                    return obj.asDict()
+                return [toItem(v) for v in obj]
+            else:
+                return obj
+
+        return dict(((k, toItem(v)) for k, v in item_fn()))
+
+    def copy(self):
+        """
+        Returns a new copy of a :class:`ParseResults` object.
+        """
+        ret = ParseResults(self._ParseResults__toklist)
+        ret._ParseResults__tokdict = dict(self._ParseResults__tokdict.items())
+        ret._ParseResults__parent = self._ParseResults__parent
+        ret._ParseResults__accumNames.update(self._ParseResults__accumNames)
+        ret._ParseResults__name = self._ParseResults__name
+        return ret
+
+    def asXML(self, doctag=None, namedItemsOnly=False, indent='', formatted=True):
+        """
+        (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names.
+        """
+        nl = "\n"
+        out = []
+        namedItems = dict(((v[1], k) for k, vlist in self._ParseResults__tokdict.items() for v in vlist))
+        nextLevelIndent = indent + "  "
+        if not formatted:
+            indent = ""
+            nextLevelIndent = ""
+            nl = ""
+        else:
+            selfTag = None
+            if doctag is not None:
+                selfTag = doctag
+            else:
+                if self._ParseResults__name:
+                    selfTag = self._ParseResults__name
+        if not selfTag:
+            if namedItemsOnly:
+                return ""
+            selfTag = "ITEM"
+        out += [nl, indent, '<', selfTag, '>']
+        for i, res in enumerate(self._ParseResults__toklist):
+            if isinstance(res, ParseResults):
+                if i in namedItems:
+                    out += [
+                     res.asXML(namedItems[i], namedItemsOnly and doctag is None, nextLevelIndent, formatted)]
+                else:
+                    out += [
+                     res.asXML(None, namedItemsOnly and doctag is None, nextLevelIndent, formatted)]
+            else:
+                resTag = None
+                if i in namedItems:
+                    resTag = namedItems[i]
+                if not resTag:
+                    if namedItemsOnly:
+                        continue
+                    else:
+                        resTag = "ITEM"
+                xmlBodyText = _xml_escape(_ustr(res))
+                out += [nl, nextLevelIndent, '<', resTag, '>', 
+                 xmlBodyText, 
+                 '']
+
+        out += [nl, indent, '']
+        return "".join(out)
+
+    def __lookup(self, sub):
+        for k, vlist in self._ParseResults__tokdict.items():
+            for v, loc in vlist:
+                if sub is v:
+                    return k
+
+    def getName(self):
+        r"""
+        Returns the results name for this token expression. Useful when several
+        different expressions might match at a particular location.
+
+        Example::
+
+            integer = Word(nums)
+            ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d")
+            house_number_expr = Suppress('#') + Word(nums, alphanums)
+            user_data = (Group(house_number_expr)("house_number")
+                        | Group(ssn_expr)("ssn")
+                        | Group(integer)("age"))
+            user_info = OneOrMore(user_data)
+
+            result = user_info.parseString("22 111-22-3333 #221B")
+            for item in result:
+                print(item.getName(), ':', item[0])
+
+        prints::
+
+            age : 22
+            ssn : 111-22-3333
+            house_number : 221B
+        """
+        if self._ParseResults__name:
+            return self._ParseResults__name
+            if self._ParseResults__parent:
+                par = self._ParseResults__parent()
+                if par:
+                    return par._ParseResults__lookup(self)
+                return
+        elif len(self) == 1:
+            if len(self._ParseResults__tokdict) == 1:
+                if next(iter(self._ParseResults__tokdict.values()))[0][1] in (0, -1):
+                    return next(iter(self._ParseResults__tokdict.keys()))
+        return
+
    def dump(self, indent='', full=True, include_list=True, _depth=0):
        """
        Diagnostic method for listing out the contents of
        a :class:`ParseResults`. Accepts an optional ``indent`` argument so
        that this string can be embedded in a nested display of other data.

        Example::

            integer = Word(nums)
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")

            result = date_str.parseString('12/31/1999')
            print(result.dump())

        prints::

            ['12', '/', '31', '/', '1999']
            - day: 1999
            - month: 31
            - year: 12
        """
        out = []
        NL = "\n"
        if include_list:
            # leading line: the flat list form of the tokens
            out.append(indent + _ustr(self.asList()))
        else:
            out.append("")
        if full:
            if self.haskeys():
                # named results: one "- name: value" line each, sorted by name
                items = sorted(((str(k), v) for k, v in self.items()))
                for k, v in items:
                    if out:
                        out.append(NL)
                    out.append("%s%s- %s: " % (indent, "  " * _depth, k))
                    if isinstance(v, ParseResults):
                        if v:
                            # recurse one level deeper for nested results
                            out.append(v.dump(indent=indent, full=full, include_list=include_list, _depth=(_depth + 1)))
                        else:
                            out.append(_ustr(v))
                    else:
                        out.append(repr(v))

            else:
                # no names at this level: show nested results as "[index]:" entries
                if any((isinstance(vv, ParseResults) for vv in self)):
                    v = self
                    for i, vv in enumerate(v):
                        if isinstance(vv, ParseResults):
                            out.append("\n%s%s[%d]:\n%s%s%s" % (indent,
                             "  " * _depth,
                             i,
                             indent,
                             "  " * (_depth + 1),
                             vv.dump(indent=indent, full=full,
                               include_list=include_list,
                               _depth=(_depth + 1))))
                        else:
                            out.append("\n%s%s[%d]:\n%s%s%s" % (indent,
                             "  " * _depth,
                             i,
                             indent,
                             "  " * (_depth + 1),
                             _ustr(vv)))

        return "".join(out)
+
+    def pprint(self, *args, **kwargs):
+        """
+        Pretty-printer for parsed results as a list, using the
+        `pprint `_ module.
+        Accepts additional positional or keyword args as defined for
+        `pprint.pprint `_ .
+
+        Example::
+
+            ident = Word(alphas, alphanums)
+            num = Word(nums)
+            func = Forward()
+            term = ident | num | Group('(' + func + ')')
+            func <<= ident + Group(Optional(delimitedList(term)))
+            result = func.parseString("fna a,b,(fnb c,d,200),100")
+            result.pprint(width=40)
+
+        prints::
+
+            ['fna',
+             ['a',
+              'b',
+              ['(', 'fnb', ['c', 'd', '200'], ')'],
+              '100']]
+        """
+        (pprint.pprint)(self.asList(), *args, **kwargs)
+
+    def __getstate__(self):
+        return (
+         self._ParseResults__toklist,
+         (
+          self._ParseResults__tokdict.copy(),
+          self._ParseResults__parent is not None and self._ParseResults__parent() or None,
+          self._ParseResults__accumNames,
+          self._ParseResults__name))
+
+    def __setstate__(self, state):
+        self._ParseResults__toklist = state[0]
+        self._ParseResults__tokdict, par, inAccumNames, self._ParseResults__name = state[1]
+        self._ParseResults__accumNames = {}
+        self._ParseResults__accumNames.update(inAccumNames)
+        if par is not None:
+            self._ParseResults__parent = wkref(par)
+        else:
+            self._ParseResults__parent = None
+
+    def __getnewargs__(self):
+        return (self._ParseResults__toklist, self._ParseResults__name, self._ParseResults__asList, self._ParseResults__modal)
+
+    def __dir__(self):
+        return dir(type(self)) + list(self.keys())
+
+    @classmethod
+    def from_dict(cls, other, name=None):
+        """
+        Helper classmethod to construct a ParseResults from a dict, preserving the
+        name-value relations as results names. If an optional 'name' argument is
+        given, a nested ParseResults will be returned
+        """
+
+        def is_iterable(obj):
+            try:
+                iter(obj)
+            except Exception:
+                return False
+            else:
+                if PY_3:
+                    return not isinstance(obj, (str, bytes))
+                return not isinstance(obj, basestring)
+
+        ret = cls([])
+        for k, v in other.items():
+            if isinstance(v, Mapping):
+                ret += cls.from_dict(v, name=k)
+            else:
+                ret += cls([v], name=k, asList=(is_iterable(v)))
+
+        if name is not None:
+            ret = cls([ret], name=name)
+        return ret
+
+
+MutableMapping.register(ParseResults)
+
def col(loc, strg):
    """Return the 1-based column of location *loc* within *strg*.

    Newlines count as line separators, so the character immediately after a
    newline is column 1.

    Note: the default parsing behavior is to expand tabs in the input string
    before parsing begins; see :class:`ParserElement.parseString` for ways to
    keep parse locations consistent with the original string.
    """
    if 0 < loc < len(strg) and strg[loc - 1] == "\n":
        return 1
    return loc - strg.rfind("\n", 0, loc)
+
+
def lineno(loc, strg):
    """Return the 1-based line number of location *loc* within *strg*.

    Newlines count as line separators.

    Note: the default parsing behavior is to expand tabs in the input string
    before parsing begins; see :class:`ParserElement.parseString` for ways to
    keep parse locations consistent with the original string.
    """
    return 1 + strg.count("\n", 0, loc)
+
+
def line(loc, strg):
    """Return the full line of text containing position *loc* within *strg*.

    Newlines count as line separators and are not included in the result.

    Fix: the decompiled source sliced an integer -- ``(lastCR + 1)[:nextCR]``
    -- which raises TypeError at runtime; the intended expression is a string
    slice ``strg[lastCR + 1:nextCR]``.
    """
    lastCR = strg.rfind("\n", 0, loc)
    nextCR = strg.find("\n", loc)
    if nextCR >= 0:
        return strg[lastCR + 1:nextCR]
    # No following newline: the line runs to the end of the string.
    return strg[lastCR + 1:]
+
+
def _defaultStartDebugAction(instring, loc, expr):
    # Default debug hook: announce that a match attempt is starting at loc.
    position = "(%d,%d)" % (lineno(loc, instring), col(loc, instring))
    print("Match " + _ustr(expr) + " at loc " + _ustr(loc) + position)
+
+
def _defaultSuccessDebugAction(instring, startloc, endloc, expr, toks):
    # Default debug hook: report a successful match and the tokens produced.
    print("Matched {0} -> {1}".format(_ustr(expr), str(toks.asList())))
+
+
def _defaultExceptionDebugAction(instring, loc, expr, exc):
    # Default debug hook: report a parse exception raised during matching.
    print("Exception raised:{0}".format(_ustr(exc)))
+
+
def nullDebugAction(*args):
    """Debug action that does nothing; install it to silence debug output."""
    return None
+
+
def _trim_arity(func, maxargs=2):
    """Wrap *func* so it is called with only as many trailing arguments as it
    accepts (parse actions may take 0-3 of (s, loc, toks)).

    The wrapper probes the arity at first call by catching TypeError and
    inspecting the traceback to distinguish "wrong arity here" from a
    TypeError raised inside *func* itself.

    Fix: the decompiled source used ``None[:2]`` / ``None[:None]`` slice
    artifacts (``system_version[None[:2]]``, ``frame_summary[None[:2]]``,
    ``args[limit[0][:None]]``), all of which raise TypeError; the intended
    expressions are plain slices (``[:2]``, ``[limit[0]:]``).
    """
    if func in singleArgBuiltins:
        return lambda s, l, t: func(t)

    limit = [0]            # how many leading args to drop (mutable for closure)
    foundArity = [False]   # set once a call succeeds, locking in the arity

    # traceback frame shapes differ by Python version
    if system_version[:2] >= (3, 5):

        def extract_stack(limit=0):
            # special handling for Python 3.5.0 - extra deep call stack by 1
            offset = -3 if system_version == (3, 5, 0) else -2
            frame_summary = traceback.extract_stack(limit=(-offset + limit - 1))[offset]
            return [frame_summary[:2]]

        def extract_tb(tb, limit=0):
            frames = traceback.extract_tb(tb, limit=limit)
            frame_summary = frames[-1]
            return [frame_summary[:2]]

    else:
        extract_stack = traceback.extract_stack
        extract_tb = traceback.extract_tb

    # Synthesize the (file, line) pair of the func(...) call inside wrapper;
    # LINE_DIFF is the exact number of source lines between the extract_stack
    # call below and that func(...) call -- do not insert lines between them.
    LINE_DIFF = 6
    this_line = extract_stack(limit=2)[-1]
    pa_call_line_synth = (this_line[0], this_line[1] + LINE_DIFF)

    def wrapper(*args):
        while True:
            try:
                ret = func(*args[limit[0]:])
                foundArity[0] = True
                return ret
            except TypeError:
                # re-raise TypeErrors that did not come from our arity probing
                if foundArity[0]:
                    raise
                else:
                    try:
                        tb = sys.exc_info()[-1]
                        if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth:
                            raise
                    finally:
                        try:
                            del tb
                        except NameError:
                            pass

                if limit[0] <= maxargs:
                    limit[0] += 1
                    continue
                raise

    # copy func name to wrapper for sensible debug output
    func_name = ""
    try:
        func_name = getattr(func, "__name__", getattr(func, "__class__").__name__)
    except Exception:
        func_name = str(func)

    wrapper.__name__ = func_name
    return wrapper
+
+
+class ParserElement(object):
+    __doc__ = "Abstract base level parser element class."
+    DEFAULT_WHITE_CHARS = " \n\t\r"
+    verbose_stacktrace = False
+
+    @staticmethod
+    def setDefaultWhitespaceChars(chars):
+        r"""
+        Overrides the default whitespace chars
+
+        Example::
+
+            # default whitespace chars are space,  and newline
+            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def', 'ghi', 'jkl']
+
+            # change to just treat newline as significant
+            ParserElement.setDefaultWhitespaceChars(" \t")
+            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def']
+        """
+        ParserElement.DEFAULT_WHITE_CHARS = chars
+
+    @staticmethod
+    def inlineLiteralsUsing(cls):
+        """
+        Set class to be used for inclusion of string literals into a parser.
+
+        Example::
+
+            # default literal class used is Literal
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']
+
+            # change to Suppress
+            ParserElement.inlineLiteralsUsing(Suppress)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+            date_str.parseString("1999/12/31")  # -> ['1999', '12', '31']
+        """
+        ParserElement._literalStringClass = cls
+
+    @classmethod
+    def _trim_traceback(cls, tb):
+        while tb.tb_next:
+            tb = tb.tb_next
+
+        return tb
+
    def __init__(self, savelist=False):
        """Initialize base parser-element state.

        :param savelist: if True, matched tokens are saved as a list in the
            returned ParseResults (see ``saveAsList`` below).
        """
        self.parseAction = list()  # callables run after a successful match
        self.failAction = None  # optional callable invoked when this element fails
        self.strRepr = None  # cached string form -- presumably filled in lazily; TODO confirm
        self.resultsName = None  # name under which tokens are stored in results
        self.saveAsList = savelist  # whether matched tokens are kept as a list
        self.skipWhitespace = True  # skip leading whitespace before matching
        self.whiteChars = set(ParserElement.DEFAULT_WHITE_CHARS)  # chars treated as whitespace
        self.copyDefaultWhiteChars = True  # copy() resets whiteChars to the class default
        self.mayReturnEmpty = False  # NOTE(review): appears to flag zero-width matches; confirm
        self.keepTabs = False  # if True, parseString will not expand tabs
        self.ignoreExprs = list()  # expressions skipped over during matching (see preParse)
        self.debug = False  # enable debug actions for this element
        self.streamlined = False  # set once streamline() has been applied
        self.mayIndexError = True  # parseImpl may raise IndexError at end of input
        self.errmsg = ""  # message used when raising ParseException
        self.modalResults = True  # modal results names report only the last match
        self.debugActions = (None, None, None)  # custom (try, match, fail) debug callbacks
        self.re = None  # compiled regex, when applicable -- presumably set by subclasses; verify
        self.callPreparse = True  # call preParse before parseImpl
        self.callDuringTry = False  # run parse actions even during lookahead/try parses
+
+    def copy(self):
+        """
+        Make a copy of this :class:`ParserElement`.  Useful for defining
+        different parse actions for the same parsing pattern, using copies of
+        the original parse element.
+
+        Example::
+
+            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+            integerK = integer.copy().addParseAction(lambda toks: toks[0] * 1024) + Suppress("K")
+            integerM = integer.copy().addParseAction(lambda toks: toks[0] * 1024 * 1024) + Suppress("M")
+
+            print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M"))
+
+        prints::
+
+            [5120, 100, 655360, 268435456]
+
+        Equivalent form of ``expr.copy()`` is just ``expr()``::
+
+            integerM = integer().addParseAction(lambda toks: toks[0] * 1024 * 1024) + Suppress("M")
+        """
+        cpy = copy.copy(self)
+        cpy.parseAction = self.parseAction[None[:None]]
+        cpy.ignoreExprs = self.ignoreExprs[None[:None]]
+        if self.copyDefaultWhiteChars:
+            cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
+        return cpy
+
+    def setName(self, name):
+        """
+        Define name for this expression, makes debugging and exception messages clearer.
+
+        Example::
+
+            Word(nums).parseString("ABC")  # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1)
+            Word(nums).setName("integer").parseString("ABC")  # -> Exception: Expected integer (at char 0), (line:1, col:1)
+        """
+        self.name = name
+        self.errmsg = "Expected " + self.name
+        if __diag__.enable_debug_on_named_expressions:
+            self.setDebug()
+        return self
+
+    def setResultsName(self, name, listAllMatches=False):
+        """
+        Define name for referencing matching tokens as a nested attribute
+        of the returned parse results.
+        NOTE: this returns a *copy* of the original :class:`ParserElement` object;
+        this is so that the client can define a basic element, such as an
+        integer, and reference it in multiple places with different names.
+
+        You can also set results names using the abbreviated syntax,
+        ``expr("name")`` in place of ``expr.setResultsName("name")``
+        - see :class:`__call__`.
+
+        Example::
+
+            date_str = (integer.setResultsName("year") + '/'
+                        + integer.setResultsName("month") + '/'
+                        + integer.setResultsName("day"))
+
+            # equivalent form:
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+        """
+        return self._setResultsName(name, listAllMatches)
+
+    def _setResultsName(self, name, listAllMatches=False):
+        newself = self.copy()
+        if name.endswith("*"):
+            name = name[None[:-1]]
+            listAllMatches = True
+        newself.resultsName = name
+        newself.modalResults = not listAllMatches
+        return newself
+
+    def setBreak(self, breakFlag=True):
+        """Method to invoke the Python pdb debugger when this element is
+           about to be parsed. Set ``breakFlag`` to True to enable, False to
+           disable.
+        """
+        if breakFlag:
+            _parseMethod = self._parse
+
+            def breaker(instring, loc, doActions=True, callPreParse=True):
+                import pdb
+                pdb.set_trace()
+                return _parseMethod(instring, loc, doActions, callPreParse)
+
+            breaker._originalParseMethod = _parseMethod
+            self._parse = breaker
+        else:
+            if hasattr(self._parse, "_originalParseMethod"):
+                self._parse = self._parse._originalParseMethod
+        return self
+
+    def setParseAction(self, *fns, **kwargs):
+        """
+        Define one or more actions to perform when successfully matching parse element definition.
+        Parse action fn is a callable method with 0-3 arguments, called as ``fn(s, loc, toks)`` ,
+        ``fn(loc, toks)`` , ``fn(toks)`` , or just ``fn()`` , where:
+
+        - s   = the original string being parsed (see note below)
+        - loc = the location of the matching substring
+        - toks = a list of the matched tokens, packaged as a :class:`ParseResults` object
+
+        If the functions in fns modify the tokens, they can return them as the return
+        value from fn, and the modified list of tokens will replace the original.
+        Otherwise, fn does not need to return any value.
+
+        If None is passed as the parse action, all previously added parse actions for this
+        expression are cleared.
+
+        Optional keyword arguments:
+        - callDuringTry = (default= ``False``) indicate if parse action should be run during lookaheads and alternate testing
+
+        Note: the default parsing behavior is to expand tabs in the input string
+        before starting the parsing process.  See :class:`parseString for more
+        information on parsing strings containing ```` s, and suggested
+        methods to maintain a consistent view of the parsed string, the parse
+        location, and line and column positions within the parsed string.
+
+        Example::
+
+            integer = Word(nums)
+            date_str = integer + '/' + integer + '/' + integer
+
+            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']
+
+            # use parse action to convert to ints at parse time
+            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+            date_str = integer + '/' + integer + '/' + integer
+
+            # note that integer fields are now ints, not strings
+            date_str.parseString("1999/12/31")  # -> [1999, '/', 12, '/', 31]
+        """
+        if list(fns) == [None]:
+            self.parseAction = []
+        else:
+            if not all((callable(fn) for fn in fns)):
+                raise TypeError("parse actions must be callable")
+            self.parseAction = list(map(_trim_arity, list(fns)))
+            self.callDuringTry = kwargs.get("callDuringTry", False)
+        return self
+
+    def addParseAction(self, *fns, **kwargs):
+        """
+        Add one or more parse actions to expression's list of parse actions. See :class:`setParseAction`.
+
+        See examples in :class:`copy`.
+        """
+        self.parseAction += list(map(_trim_arity, list(fns)))
+        self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
+        return self
+
+    def addCondition(self, *fns, **kwargs):
+        """Add a boolean predicate function to expression's list of parse actions. See
+        :class:`setParseAction` for function call signatures. Unlike ``setParseAction``,
+        functions passed to ``addCondition`` need to return boolean success/fail of the condition.
+
+        Optional keyword arguments:
+        - message = define a custom message to be used in the raised exception
+        - fatal   = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException
+
+        Example::
+
+            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+            year_int = integer.copy()
+            year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later")
+            date_str = year_int + '/' + integer + '/' + integer
+
+            result = date_str.parseString("1999/12/31")  # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1)
+        """
+        for fn in fns:
+            self.parseAction.append(conditionAsParseAction(fn, message=(kwargs.get("message")), fatal=(kwargs.get("fatal", False))))
+
+        self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
+        return self
+
+    def setFailAction(self, fn):
+        """Define action to perform if parsing fails at this expression.
+           Fail acton fn is a callable function that takes the arguments
+           ``fn(s, loc, expr, err)`` where:
+           - s = string being parsed
+           - loc = location where expression match was attempted and failed
+           - expr = the parse expression that failed
+           - err = the exception thrown
+           The function returns no value.  It may throw :class:`ParseFatalException`
+           if it is desired to stop parsing immediately."""
+        self.failAction = fn
+        return self
+
+    def _skipIgnorables(self, instring, loc):
+        exprsFound = True
+        while exprsFound:
+            exprsFound = False
+            for e in self.ignoreExprs:
+                try:
+                    while True:
+                        loc, dummy = e._parse(instring, loc)
+                        exprsFound = True
+
+                except ParseException:
+                    pass
+
+        return loc
+
+    def preParse(self, instring, loc):
+        if self.ignoreExprs:
+            loc = self._skipIgnorables(instring, loc)
+        if self.skipWhitespace:
+            wt = self.whiteChars
+            instrlen = len(instring)
+            while loc < instrlen and instring[loc] in wt:
+                loc += 1
+
+        return loc
+
+    def parseImpl(self, instring, loc, doActions=True):
+        return (
+         loc, [])
+
+    def postParse(self, instring, loc, tokenlist):
+        return tokenlist
+
+    def _parseNoCache(self, instring, loc, doActions=True, callPreParse=True):
+        TRY, MATCH, FAIL = (0, 1, 2)
+        debugging = self.debug
+        if debugging or self.failAction:
+            if self.debugActions[TRY]:
+                self.debugActions[TRY](instring, loc, self)
+            try:
+                if callPreParse and self.callPreparse:
+                    preloc = self.preParse(instring, loc)
+                else:
+                    preloc = loc
+                tokensStart = preloc
+                if self.mayIndexError or preloc >= len(instring):
+                    try:
+                        loc, tokens = self.parseImpl(instring, preloc, doActions)
+                    except IndexError:
+                        raise ParseException(instring, len(instring), self.errmsg, self)
+
+                else:
+                    loc, tokens = self.parseImpl(instring, preloc, doActions)
+            except Exception as err:
+                try:
+                    if self.debugActions[FAIL]:
+                        self.debugActions[FAIL](instring, tokensStart, self, err)
+                    if self.failAction:
+                        self.failAction(instring, tokensStart, self, err)
+                    raise
+                finally:
+                    err = None
+                    del err
+
+        else:
+            if callPreParse:
+                if self.callPreparse:
+                    preloc = self.preParse(instring, loc)
+                else:
+                    preloc = loc
+                tokensStart = preloc
+                if self.mayIndexError or preloc >= len(instring):
+                    try:
+                        loc, tokens = self.parseImpl(instring, preloc, doActions)
+                    except IndexError:
+                        raise ParseException(instring, len(instring), self.errmsg, self)
+
+                else:
+                    loc, tokens = self.parseImpl(instring, preloc, doActions)
+            else:
+                tokens = self.postParse(instring, loc, tokens)
+                retTokens = ParseResults(tokens, (self.resultsName), asList=(self.saveAsList), modal=(self.modalResults))
+                if self.parseAction and not doActions:
+                    if self.callDuringTry:
+                        if debugging:
+                            try:
+                                for fn in self.parseAction:
+                                    try:
+                                        tokens = fn(instring, tokensStart, retTokens)
+                                    except IndexError as parse_action_exc:
+                                        try:
+                                            exc = ParseException("exception raised in parse action")
+                                            exc.__cause__ = parse_action_exc
+                                            raise exc
+                                        finally:
+                                            parse_action_exc = None
+                                            del parse_action_exc
+
+                                    if tokens is not None and tokens is not retTokens:
+                                        retTokens = ParseResults(tokens, (self.resultsName),
+                                          asList=(self.saveAsList and isinstance(tokens, (ParseResults, list))),
+                                          modal=(self.modalResults))
+
+                            except Exception as err:
+                                try:
+                                    if self.debugActions[FAIL]:
+                                        self.debugActions[FAIL](instring, tokensStart, self, err)
+                                    raise
+                                finally:
+                                    err = None
+                                    del err
+
+                        else:
+                            for fn in self.parseAction:
+                                try:
+                                    tokens = fn(instring, tokensStart, retTokens)
+                                except IndexError as parse_action_exc:
+                                    try:
+                                        exc = ParseException("exception raised in parse action")
+                                        exc.__cause__ = parse_action_exc
+                                        raise exc
+                                    finally:
+                                        parse_action_exc = None
+                                        del parse_action_exc
+
+                                if tokens is not None and tokens is not retTokens:
+                                    retTokens = ParseResults(tokens, (self.resultsName),
+                                      asList=(self.saveAsList and isinstance(tokens, (ParseResults, list))),
+                                      modal=(self.modalResults))
+
+            if debugging:
+                if self.debugActions[MATCH]:
+                    self.debugActions[MATCH](instring, tokensStart, loc, self, retTokens)
+            return (
+             loc, retTokens)
+
+    def tryParse(self, instring, loc):
+        try:
+            return self._parse(instring, loc, doActions=False)[0]
+        except ParseFatalException:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+    def canParseNext(self, instring, loc):
+        try:
+            self.tryParse(instring, loc)
+        except (ParseException, IndexError):
+            return False
+        else:
+            return True
+
+    class _UnboundedCache(object):
+
+        def __init__(self):
+            cache = {}
+            self.not_in_cache = not_in_cache = object()
+
+            def get(self, key):
+                return cache.get(key, not_in_cache)
+
+            def set(self, key, value):
+                cache[key] = value
+
+            def clear(self):
+                cache.clear()
+
+            def cache_len(self):
+                return len(cache)
+
+            self.get = types.MethodType(get, self)
+            self.set = types.MethodType(set, self)
+            self.clear = types.MethodType(clear, self)
+            self.__len__ = types.MethodType(cache_len, self)
+
    # Bounded FIFO memo cache for packrat parsing.  Two implementations are
    # provided: an OrderedDict-based one when _OrderedDict imported
    # successfully, and a dict-plus-deque fallback otherwise.
    if _OrderedDict is not None:

        class _FifoCache(object):
            # FIFO cache backed by an OrderedDict; oldest entries evicted first.

            def __init__(self, size):
                # Sentinel distinguishing "missing" from a cached None value.
                self.not_in_cache = not_in_cache = object()
                cache = _OrderedDict()

                def get(self, key):
                    return cache.get(key, not_in_cache)

                def set(self, key, value):
                    cache[key] = value
                    # Evict in insertion order once the size limit is exceeded.
                    while len(cache) > size:
                        try:
                            cache.popitem(False)
                        except KeyError:
                            pass

                def clear(self):
                    cache.clear()

                def cache_len(self):
                    return len(cache)

                # Bind the closures as methods on this instance.
                self.get = types.MethodType(get, self)
                self.set = types.MethodType(set, self)
                self.clear = types.MethodType(clear, self)
                self.__len__ = types.MethodType(cache_len, self)

    else:

        class _FifoCache(object):
            # Fallback FIFO cache: plain dict plus a deque recording key order.

            def __init__(self, size):
                # Sentinel distinguishing "missing" from a cached None value.
                self.not_in_cache = not_in_cache = object()
                cache = {}
                key_fifo = collections.deque([], size)

                def get(self, key):
                    return cache.get(key, not_in_cache)

                def set(self, key, value):
                    cache[key] = value
                    # Drop the oldest keys when over the limit, then record this key.
                    while len(key_fifo) > size:
                        cache.pop(key_fifo.popleft(), None)

                    key_fifo.append(key)

                def clear(self):
                    cache.clear()
                    key_fifo.clear()

                def cache_len(self):
                    return len(cache)

                # Bind the closures as methods on this instance.
                self.get = types.MethodType(get, self)
                self.set = types.MethodType(set, self)
                self.clear = types.MethodType(clear, self)
                self.__len__ = types.MethodType(cache_len, self)
+
    # Shared packrat memo state: the cache (replaced by enablePackrat()),
    # a lock guarding it, and [hit, miss] counters.
    packrat_cache = {}
    packrat_cache_lock = RLock()
    packrat_cache_stats = [0, 0]
+
+    def _parseCache(self, instring, loc, doActions=True, callPreParse=True):
+        HIT, MISS = (0, 1)
+        lookup = (self, instring, loc, callPreParse, doActions)
+        with ParserElement.packrat_cache_lock:
+            cache = ParserElement.packrat_cache
+            value = cache.get(lookup)
+            if value is cache.not_in_cache:
+                ParserElement.packrat_cache_stats[MISS] += 1
+                try:
+                    value = self._parseNoCache(instring, loc, doActions, callPreParse)
+                except ParseBaseException as pe:
+                    try:
+                        cache.set(lookup, (pe.__class__)(*pe.args))
+                        raise
+                    finally:
+                        pe = None
+                        del pe
+
+                else:
+                    cache.set(lookup, (value[0], value[1].copy()))
+                    return value
+            else:
+                ParserElement.packrat_cache_stats[HIT] += 1
+                if isinstance(value, Exception):
+                    raise value
+                return (
+                 value[0], value[1].copy())
+
    # Default parse entry point is the non-memoizing variant;
    # enablePackrat() rebinds this to _parseCache.
    _parse = _parseNoCache
+
+    @staticmethod
+    def resetCache():
+        ParserElement.packrat_cache.clear()
+        ParserElement.packrat_cache_stats[None[:None]] = [0] * len(ParserElement.packrat_cache_stats)
+
    # Tracks whether enablePackrat() has already installed the memoizing parser.
    _packratEnabled = False
+
+    @staticmethod
+    def enablePackrat(cache_size_limit=128):
+        """Enables "packrat" parsing, which adds memoizing to the parsing logic.
+           Repeated parse attempts at the same string location (which happens
+           often in many complex grammars) can immediately return a cached value,
+           instead of re-executing parsing/validating code.  Memoizing is done of
+           both valid results and parsing exceptions.
+
+           Parameters:
+
+           - cache_size_limit - (default= ``128``) - if an integer value is provided
+             will limit the size of the packrat cache; if None is passed, then
+             the cache size will be unbounded; if 0 is passed, the cache will
+             be effectively disabled.
+
+           This speedup may break existing programs that use parse actions that
+           have side-effects.  For this reason, packrat parsing is disabled when
+           you first import pyparsing.  To activate the packrat feature, your
+           program must call the class method :class:`ParserElement.enablePackrat`.
+           For best results, call ``enablePackrat()`` immediately after
+           importing pyparsing.
+
+           Example::
+
+               import pyparsing
+               pyparsing.ParserElement.enablePackrat()
+        """
+        if not ParserElement._packratEnabled:
+            ParserElement._packratEnabled = True
+            if cache_size_limit is None:
+                ParserElement.packrat_cache = ParserElement._UnboundedCache()
+            else:
+                ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit)
+            ParserElement._parse = ParserElement._parseCache
+
+    def parseString(self, instring, parseAll=False):
+        """
+        Execute the parse expression with the given string.
+        This is the main interface to the client code, once the complete
+        expression has been built.
+
+        Returns the parsed data as a :class:`ParseResults` object, which may be
+        accessed as a list, or as a dict or object with attributes if the given parser
+        includes results names.
+
+        If you want the grammar to require that the entire input string be
+        successfully parsed, then set ``parseAll`` to True (equivalent to ending
+        the grammar with ``StringEnd()``).
+
+        Note: ``parseString`` implicitly calls ``expandtabs()`` on the input string,
+        in order to report proper column numbers in parse actions.
+        If the input string contains tabs and
+        the grammar uses parse actions that use the ``loc`` argument to index into the
+        string being parsed, you can ensure you have a consistent view of the input
+        string by:
+
+        - calling ``parseWithTabs`` on your grammar before calling ``parseString``
+          (see :class:`parseWithTabs`)
+        - define your parse action using the full ``(s, loc, toks)`` signature, and
+          reference the input string using the parse action's ``s`` argument
+        - explictly expand the tabs in your input string before calling
+          ``parseString``
+
+        Example::
+
+            Word('a').parseString('aaaaabaaa')  # -> ['aaaaa']
+            Word('a').parseString('aaaaabaaa', parseAll=True)  # -> Exception: Expected end of text
+        """
+        ParserElement.resetCache()
+        if not self.streamlined:
+            self.streamline()
+        for e in self.ignoreExprs:
+            e.streamline()
+
+        if not self.keepTabs:
+            instring = instring.expandtabs()
+        try:
+            loc, tokens = self._parse(instring, 0)
+            if parseAll:
+                loc = self.preParse(instring, loc)
+                se = Empty() + StringEnd()
+                se._parse(instring, loc)
+        except ParseBaseException as exc:
+            try:
+                if ParserElement.verbose_stacktrace:
+                    raise
+                else:
+                    if getattr(exc, "__traceback__", None) is not None:
+                        exc.__traceback__ = self._trim_traceback(exc.__traceback__)
+                    raise exc
+            finally:
+                exc = None
+                del exc
+
+        else:
+            return tokens
+
+    def scanString(self, instring, maxMatches=_MAX_INT, overlap=False):
+        """
+        Scan the input string for expression matches.  Each match will return the
+        matching tokens, start location, and end location.  May be called with optional
+        ``maxMatches`` argument, to clip scanning after 'n' matches are found.  If
+        ``overlap`` is specified, then overlapping matches will be reported.
+
+        Note that the start and end locations are reported relative to the string
+        being parsed.  See :class:`parseString` for more information on parsing
+        strings with embedded tabs.
+
+        Example::
+
+            source = "sldjf123lsdjjkf345sldkjf879lkjsfd987"
+            print(source)
+            for tokens, start, end in Word(alphas).scanString(source):
+                print(' '*start + '^'*(end-start))
+                print(' '*start + tokens[0])
+
+        prints::
+
+            sldjf123lsdjjkf345sldkjf879lkjsfd987
+            ^^^^^
+            sldjf
+                    ^^^^^^^
+                    lsdjjkf
+                              ^^^^^^
+                              sldkjf
+                                       ^^^^^^
+                                       lkjsfd
+        """
+        if not self.streamlined:
+            self.streamline()
+        else:
+            for e in self.ignoreExprs:
+                e.streamline()
+
+            if not self.keepTabs:
+                instring = _ustr(instring).expandtabs()
+            instrlen = len(instring)
+            loc = 0
+            preparseFn = self.preParse
+            parseFn = self._parse
+            ParserElement.resetCache()
+            matches = 0
+            try:
+                while loc <= instrlen:
+                    if matches < maxMatches:
+                        try:
+                            preloc = preparseFn(instring, loc)
+                            nextLoc, tokens = parseFn(instring, preloc, callPreParse=False)
+                        except ParseException:
+                            loc = preloc + 1
+
+                        if nextLoc > loc:
+                            matches += 1
+                            yield (tokens, preloc, nextLoc)
+                            if overlap:
+                                nextloc = preparseFn(instring, loc)
+                                if nextloc > loc:
+                                    loc = nextLoc
+                                else:
+                                    loc += 1
+                            else:
+                                loc = nextLoc
+                    else:
+                        loc = preloc + 1
+
+            except ParseBaseException as exc:
+                try:
+                    if ParserElement.verbose_stacktrace:
+                        raise
+                    else:
+                        if getattr(exc, "__traceback__", None) is not None:
+                            exc.__traceback__ = self._trim_traceback(exc.__traceback__)
+                        raise exc
+                finally:
+                    exc = None
+                    del exc
+
+    def transformString(self, instring):
+        """
+        Extension to :class:`scanString`, to modify matching text with modified tokens that may
+        be returned from a parse action.  To use ``transformString``, define a grammar and
+        attach a parse action to it that modifies the returned token list.
+        Invoking ``transformString()`` on a target string will then scan for matches,
+        and replace the matched text patterns according to the logic in the parse
+        action.  ``transformString()`` returns the resulting transformed string.
+
+        Example::
+
+            wd = Word(alphas)
+            wd.setParseAction(lambda toks: toks[0].title())
+
+            print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york."))
+
+        prints::
+
+            Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York.
+        """
+        out = []
+        lastE = 0
+        self.keepTabs = True
+        try:
+            for t, s, e in self.scanString(instring):
+                out.append(instring[lastE[:s]])
+                if t:
+                    if isinstance(t, ParseResults):
+                        out += t.asList()
+                    else:
+                        if isinstance(t, list):
+                            out += t
+                        else:
+                            out.append(t)
+                lastE = e
+
+            out.append(instring[lastE[:None]])
+            out = [o for o in out if o]
+            return "".join(map(_ustr, _flatten(out)))
+        except ParseBaseException as exc:
+            try:
+                if ParserElement.verbose_stacktrace:
+                    raise
+                else:
+                    if getattr(exc, "__traceback__", None) is not None:
+                        exc.__traceback__ = self._trim_traceback(exc.__traceback__)
+                    raise exc
+            finally:
+                exc = None
+                del exc
+
+    def searchString(self, instring, maxMatches=_MAX_INT):
+        """
+        Another extension to :class:`scanString`, simplifying the access to the tokens found
+        to match the given parse expression.  May be called with optional
+        ``maxMatches`` argument, to clip searching after 'n' matches are found.
+
+        Example::
+
+            # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters
+            cap_word = Word(alphas.upper(), alphas.lower())
+
+            print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))
+
+            # the sum() builtin can be used to merge results into a single ParseResults object
+            print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")))
+
+        prints::
+
+            [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']]
+            ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity']
+        """
+        try:
+            return ParseResults([t for t, s, e in self.scanString(instring, maxMatches)])
+        except ParseBaseException as exc:
+            try:
+                if ParserElement.verbose_stacktrace:
+                    raise
+                else:
+                    if getattr(exc, "__traceback__", None) is not None:
+                        exc.__traceback__ = self._trim_traceback(exc.__traceback__)
+                    raise exc
+            finally:
+                exc = None
+                del exc
+
+    def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False):
+        """
+        Generator method to split a string using the given expression as a separator.
+        May be called with optional ``maxsplit`` argument, to limit the number of splits;
+        and the optional ``includeSeparators`` argument (default= ``False``), if the separating
+        matching text should be included in the split results.
+
+        Example::
+
+            punc = oneOf(list(".,;:/-!?"))
+            print(list(punc.split("This, this?, this sentence, is badly punctuated!")))
+
+        prints::
+
+            ['This', ' this', '', ' this sentence', ' is badly punctuated', '']
+        """
+        splits = 0
+        last = 0
+        for t, s, e in self.scanString(instring, maxMatches=maxsplit):
+            yield instring[last[:s]]
+            if includeSeparators:
+                yield t[0]
+            last = e
+
+        yield instring[last[:None]]
+
+    def __add__(self, other):
+        """
+        Implementation of + operator - returns :class:`And`. Adding strings to a ParserElement
+        converts them to :class:`Literal`s by default.
+
+        Example::
+
+            greet = Word(alphas) + "," + Word(alphas) + "!"
+            hello = "Hello, World!"
+            print (hello, "->", greet.parseString(hello))
+
+        prints::
+
+            Hello, World! -> ['Hello', ',', 'World', '!']
+
+        ``...`` may be used as a parse expression as a short form of :class:`SkipTo`.
+
+            Literal('start') + ... + Literal('end')
+
+        is equivalent to:
+
+            Literal('start') + SkipTo('end')("_skipped*") + Literal('end')
+
+        Note that the skipped text is returned with '_skipped' as a results name,
+        and to support having multiple skips in the same parser, the value returned is
+        a list of all skipped text.
+        """
+        if other is Ellipsis:
+            return _PendingSkip(self)
+        elif isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        isinstance(other, ParserElement) or warnings.warn(("Cannot combine element of type %s with ParserElement" % type(other)), SyntaxWarning,
+          stacklevel=2)
+        return
+        return And([self, other])
+
+    def __radd__(self, other):
+        """
+        Implementation of + operator when left operand is not a :class:`ParserElement`
+        """
+        if other is Ellipsis:
+            return SkipTo(self)("_skipped*") + self
+        elif isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        isinstance(other, ParserElement) or warnings.warn(("Cannot combine element of type %s with ParserElement" % type(other)), SyntaxWarning,
+          stacklevel=2)
+        return
+        return other + self
+
+    def __sub__(self, other):
+        """
+        Implementation of - operator, returns :class:`And` with error stop
+        """
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        else:
+            isinstance(other, ParserElement) or warnings.warn(("Cannot combine element of type %s with ParserElement" % type(other)), SyntaxWarning,
+              stacklevel=2)
+            return
+        return self + And._ErrorStop() + other
+
+    def __rsub__(self, other):
+        """
+        Implementation of - operator when left operand is not a :class:`ParserElement`
+        """
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        else:
+            isinstance(other, ParserElement) or warnings.warn(("Cannot combine element of type %s with ParserElement" % type(other)), SyntaxWarning,
+              stacklevel=2)
+            return
+        return other - self
+
+    def __mul__(self, other):
+        """
+        Implementation of * operator, allows use of ``expr * 3`` in place of
+        ``expr + expr + expr``.  Expressions may also me multiplied by a 2-integer
+        tuple, similar to ``{min, max}`` multipliers in regular expressions.  Tuples
+        may also include ``None`` as in:
+         - ``expr*(n, None)`` or ``expr*(n, )`` is equivalent
+              to ``expr*n + ZeroOrMore(expr)``
+              (read as "at least n instances of ``expr``")
+         - ``expr*(None, n)`` is equivalent to ``expr*(0, n)``
+              (read as "0 to n instances of ``expr``")
+         - ``expr*(None, None)`` is equivalent to ``ZeroOrMore(expr)``
+         - ``expr*(1, None)`` is equivalent to ``OneOrMore(expr)``
+
+        Note that ``expr*(None, n)`` does not raise an exception if
+        more than n exprs exist in the input stream; that is,
+        ``expr*(None, n)`` does not enforce a maximum number of expr
+        occurrences.  If this behavior is desired, then write
+        ``expr*(None, n) + ~expr``
+        """
+        if other is Ellipsis:
+            other = (0, None)
+        else:
+            if isinstance(other, tuple):
+                if other[None[:1]] == (Ellipsis,):
+                    other = ((0, ) + other[1[:None]] + (None, ))[None[:2]]
+            elif isinstance(other, int):
+                minElements, optElements = other, 0
+            else:
+                if isinstance(other, tuple):
+                    other = tuple((o if o is not Ellipsis else None for o in other))
+                    other = (other + (None, None))[None[:2]]
+                    if other[0] is None:
+                        other = (
+                         0, other[1])
+                    elif isinstance(other[0], int):
+                        if other[1] is None:
+                            if other[0] == 0:
+                                return ZeroOrMore(self)
+                                if other[0] == 1:
+                                    return OneOrMore(self)
+                                return self * other[0] + ZeroOrMore(self)
+                            else:
+                                pass
+                    if isinstance(other[0], int) and isinstance(other[1], int):
+                        minElements, optElements = other
+                        optElements -= minElements
+                    else:
+                        raise TypeError("cannot multiply 'ParserElement' and ('%s', '%s') objects", type(other[0]), type(other[1]))
+                else:
+                    raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other))
+            if minElements < 0:
+                raise ValueError("cannot multiply ParserElement by negative value")
+            if optElements < 0:
+                raise ValueError("second tuple value must be greater or equal to first tuple value")
+            elif minElements == optElements == 0:
+                raise ValueError("cannot multiply ParserElement by 0 or (0, 0)")
+            elif optElements:
+
+                def makeOptionalList(n):
+                    if n > 1:
+                        return Optional(self + makeOptionalList(n - 1))
+                    return Optional(self)
+
+                if minElements:
+                    if minElements == 1:
+                        ret = self + makeOptionalList(optElements)
+                    else:
+                        ret = And([self] * minElements) + makeOptionalList(optElements)
+                else:
+                    ret = makeOptionalList(optElements)
+            else:
+                if minElements == 1:
+                    ret = self
+                else:
+                    ret = And([self] * minElements)
+            return ret
+
    def __rmul__(self, other):
        """
        Implementation of * operator when the left operand is not a
        :class:`ParserElement` (e.g. ``3 * expr``); multiplication is
        symmetric here, so simply delegate to :class:`__mul__`.
        """
        return self.__mul__(other)
+
+    def __or__(self, other):
+        """
+        Implementation of | operator - returns :class:`MatchFirst`
+        """
+        if other is Ellipsis:
+            return _PendingSkip(self, must_skip=True)
+        elif isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        isinstance(other, ParserElement) or warnings.warn(("Cannot combine element of type %s with ParserElement" % type(other)), SyntaxWarning,
+          stacklevel=2)
+        return
+        return MatchFirst([self, other])
+
+    def __ror__(self, other):
+        """
+        Implementation of | operator when left operand is not a :class:`ParserElement`
+        """
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        else:
+            isinstance(other, ParserElement) or warnings.warn(("Cannot combine element of type %s with ParserElement" % type(other)), SyntaxWarning,
+              stacklevel=2)
+            return
+        return other | self
+
+    def __xor__(self, other):
+        """
+        Implementation of ^ operator - returns :class:`Or`
+        """
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        else:
+            isinstance(other, ParserElement) or warnings.warn(("Cannot combine element of type %s with ParserElement" % type(other)), SyntaxWarning,
+              stacklevel=2)
+            return
+        return Or([self, other])
+
+    def __rxor__(self, other):
+        """
+        Implementation of ^ operator when left operand is not a :class:`ParserElement`
+        """
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        else:
+            isinstance(other, ParserElement) or warnings.warn(("Cannot combine element of type %s with ParserElement" % type(other)), SyntaxWarning,
+              stacklevel=2)
+            return
+        return other ^ self
+
+    def __and__(self, other):
+        """
+        Implementation of & operator - returns :class:`Each`
+        """
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        else:
+            isinstance(other, ParserElement) or warnings.warn(("Cannot combine element of type %s with ParserElement" % type(other)), SyntaxWarning,
+              stacklevel=2)
+            return
+        return Each([self, other])
+
+    def __rand__(self, other):
+        """
+        Implementation of & operator when left operand is not a :class:`ParserElement`
+        """
+        if isinstance(other, basestring):
+            other = self._literalStringClass(other)
+        else:
+            isinstance(other, ParserElement) or warnings.warn(("Cannot combine element of type %s with ParserElement" % type(other)), SyntaxWarning,
+              stacklevel=2)
+            return
+        return other & self
+
    def __invert__(self):
        """
        Implementation of ~ operator - returns :class:`NotAny` (a negative
        lookahead that matches only if this expression does NOT match, without
        consuming input).
        """
        return NotAny(self)
+
    def __iter__(self):
        # ParserElement defines __getitem__ (for repetition syntax, expr[n]),
        # which would otherwise make Python treat instances as iterable via the
        # legacy indexing protocol; refuse iteration explicitly so misuse
        # fails fast with a clear message.
        raise TypeError("%r object is not iterable" % self.__class__.__name__)
+
+    def __getitem__(self, key):
+        """
+        use ``[]`` indexing notation as a short form for expression repetition:
+         - ``expr[n]`` is equivalent to ``expr*n``
+         - ``expr[m, n]`` is equivalent to ``expr*(m, n)``
+         - ``expr[n, ...]`` or ``expr[n,]`` is equivalent
+              to ``expr*n + ZeroOrMore(expr)``
+              (read as "at least n instances of ``expr``")
+         - ``expr[..., n]`` is equivalent to ``expr*(0, n)``
+              (read as "0 to n instances of ``expr``")
+         - ``expr[...]`` and ``expr[0, ...]`` are equivalent to ``ZeroOrMore(expr)``
+         - ``expr[1, ...]`` is equivalent to ``OneOrMore(expr)``
+         ``None`` may be used in place of ``...``.
+
+        Note that ``expr[..., n]`` and ``expr[m, n]``do not raise an exception
+        if more than ``n`` ``expr``s exist in the input stream.  If this behavior is
+        desired, then write ``expr[..., n] + ~expr``.
+       """
+        try:
+            if isinstance(key, str):
+                key = (
+                 key,)
+            iter(key)
+        except TypeError:
+            key = (
+             key, key)
+
+        if len(key) > 2:
+            warnings.warn("only 1 or 2 index arguments supported ({0}{1})".format(key[None[:5]], "... [{0}]".format(len(key)) if len(key) > 5 else ""))
+        ret = self * tuple(key[None[:2]])
+        return ret
+
+    def __call__(self, name=None):
+        """
+        Shortcut for :class:`setResultsName`, with ``listAllMatches=False``.
+
+        If ``name`` is given with a trailing ``'*'`` character, then ``listAllMatches`` will be
+        passed as ``True``.
+
+        If ``name` is omitted, same as calling :class:`copy`.
+
+        Example::
+
+            # these are equivalent
+            userdata = Word(alphas).setResultsName("name") + Word(nums + "-").setResultsName("socsecno")
+            userdata = Word(alphas)("name") + Word(nums + "-")("socsecno")
+        """
+        if name is not None:
+            return self._setResultsName(name)
+        return self.copy()
+
    def suppress(self):
        """
        Suppresses the output of this :class:`ParserElement`; useful to keep punctuation from
        cluttering up returned output.  Returns a :class:`Suppress` wrapper around
        this element.
        """
        return Suppress(self)
+
    def leaveWhitespace(self):
        """
        Disables the skipping of whitespace before matching the characters in the
        :class:`ParserElement`'s defined pattern.  This is normally only used internally by
        the pyparsing module, but may be needed in some whitespace-sensitive grammars.

        Returns self, to allow chained calls.
        """
        self.skipWhitespace = False
        return self
+
    def setWhitespaceChars(self, chars):
        """
        Overrides the default whitespace chars skipped before matching.

        Parameters:
         - chars - string of characters to treat as skippable whitespace

        Returns self, to allow chained calls.
        """
        self.skipWhitespace = True
        self.whiteChars = chars
        # mark that this element no longer inherits the class-level default
        # whitespace characters
        self.copyDefaultWhiteChars = False
        return self
+
    def parseWithTabs(self):
        """
        Overrides default behavior to keep tab characters (instead of expanding
        them to spaces) before parsing the input string.
        Must be called before ``parseString`` when the input grammar contains
        elements that match tab characters.

        Returns self, to allow chained calls.
        """
        self.keepTabs = True
        return self
+
+    def ignore(self, other):
+        """
+        Define expression to be ignored (e.g., comments) while doing pattern
+        matching; may be called repeatedly, to define multiple comment or other
+        ignorable patterns.
+
+        Example::
+
+            patt = OneOrMore(Word(alphas))
+            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj']
+
+            patt.ignore(cStyleComment)
+            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd']
+        """
+        if isinstance(other, basestring):
+            other = Suppress(other)
+        elif isinstance(other, Suppress):
+            if other not in self.ignoreExprs:
+                self.ignoreExprs.append(other)
+        else:
+            self.ignoreExprs.append(Suppress(other.copy()))
+        return self
+
+    def setDebugActions(self, startAction, successAction, exceptionAction):
+        """
+        Enable display of debugging messages while doing pattern matching.
+        """
+        self.debugActions = (
+         startAction or _defaultStartDebugAction,
+         successAction or _defaultSuccessDebugAction,
+         exceptionAction or _defaultExceptionDebugAction)
+        self.debug = True
+        return self
+
+    def setDebug(self, flag=True):
+        """
+        Enable display of debugging messages while doing pattern matching.
+        Set ``flag`` to True to enable, False to disable.
+
+        Example::
+
+            wd = Word(alphas).setName("alphaword")
+            integer = Word(nums).setName("numword")
+            term = wd | integer
+
+            # turn on debugging for wd
+            wd.setDebug()
+
+            OneOrMore(term).parseString("abc 123 xyz 890")
+
+        prints::
+
+            Match alphaword at loc 0(1,1)
+            Matched alphaword -> ['abc']
+            Match alphaword at loc 3(1,4)
+            Exception raised:Expected alphaword (at char 4), (line:1, col:5)
+            Match alphaword at loc 7(1,8)
+            Matched alphaword -> ['xyz']
+            Match alphaword at loc 11(1,12)
+            Exception raised:Expected alphaword (at char 12), (line:1, col:13)
+            Match alphaword at loc 15(1,16)
+            Exception raised:Expected alphaword (at char 15), (line:1, col:16)
+
+        The output shown is that produced by the default debug actions - custom debug actions can be
+        specified using :class:`setDebugActions`. Prior to attempting
+        to match the ``wd`` expression, the debugging message ``"Match  at loc (,)"``
+        is shown. Then if the parse succeeds, a ``"Matched"`` message is shown, or an ``"Exception raised"``
+        message is shown. Also note the use of :class:`setName` to assign a human-readable name to the expression,
+        which makes debugging and exception messages easier to understand - for instance, the default
+        name created for the :class:`Word` expression without calling ``setName`` is ``"W:(ABCD...)"``.
+        """
+        if flag:
+            self.setDebugActions(_defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction)
+        else:
+            self.debug = False
+        return self
+
    def __str__(self):
        """Return this element's display name."""
        return self.name
+
    def __repr__(self):
        """Same as :class:`__str__`, via the module's unicode-safe ``_ustr`` helper."""
        return _ustr(self)
+
    def streamline(self):
        """
        Base implementation: mark this element as streamlined and discard any
        cached string representation.  Subclasses presumably override this to
        flatten nested expressions — TODO confirm against subclass definitions.
        Returns self, to allow chained calls.
        """
        self.streamlined = True
        self.strRepr = None
        return self
+
    def checkRecursion(self, parseElementList):
        """
        Base implementation of the recursive-definition check used by
        :class:`validate`; a leaf element has no sub-expressions, so there is
        nothing to check here.
        """
        pass
+
    def validate(self, validateTrace=None):
        """
        Check defined expressions for valid structure, check for infinite recursive definitions.

        Parameters:
         - validateTrace - (default= ``None``) unused by this base implementation
        """
        self.checkRecursion([])
+
+    def parseFile(self, file_or_filename, parseAll=False):
+        """
+        Execute the parse expression on the given file or filename.
+        If a filename is specified (instead of a file object),
+        the entire file is opened, read, and closed before parsing.
+        """
+        try:
+            file_contents = file_or_filename.read()
+        except AttributeError:
+            with open(file_or_filename, "r") as f:
+                file_contents = f.read()
+
+        try:
+            return self.parseString(file_contents, parseAll)
+        except ParseBaseException as exc:
+            try:
+                if ParserElement.verbose_stacktrace:
+                    raise
+                else:
+                    if getattr(exc, "__traceback__", None) is not None:
+                        exc.__traceback__ = self._trim_traceback(exc.__traceback__)
+                    raise exc
+            finally:
+                exc = None
+                del exc
+
+    def __eq__(self, other):
+        if self is other:
+            return True
+        if isinstance(other, basestring):
+            return self.matches(other)
+        if isinstance(other, ParserElement):
+            return vars(self) == vars(other)
+        return False
+
    def __ne__(self, other):
        """Inverse of :class:`__eq__`."""
        return not self == other
+
    def __hash__(self):
        # hash by identity so ParserElements remain hashable (usable as dict
        # keys / set members) even though __eq__ is overridden
        return id(self)
+
    def __req__(self, other):
        """Reflected equality - delegates to :class:`__eq__`."""
        return self == other
+
    def __rne__(self, other):
        """Reflected inequality - delegates to :class:`__eq__`."""
        return not self == other
+
+    def matches(self, testString, parseAll=True):
+        """
+        Method for quick testing of a parser against a test string. Good for simple
+        inline microtests of sub expressions while building up larger parser.
+
+        Parameters:
+         - testString - to test against this expression for a match
+         - parseAll - (default= ``True``) - flag to pass to :class:`parseString` when running tests
+
+        Example::
+
+            expr = Word(nums)
+            assert expr.matches("100")
+        """
+        try:
+            self.parseString((_ustr(testString)), parseAll=parseAll)
+            return True
+        except ParseBaseException:
+            return False
+
+    def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False, postParse=None, file=None):
+        r"""
+        Execute the parse expression on a series of test strings, showing each
+        test, the parsed results or where the parse failed. Quick and easy way to
+        run a parse expression against a list of sample strings.
+
+        Parameters:
+         - tests - a list of separate test strings, or a multiline string of test strings
+         - parseAll - (default= ``True``) - flag to pass to :class:`parseString` when running tests
+         - comment - (default= ``'#'``) - expression for indicating embedded comments in the test
+              string; pass None to disable comment filtering
+         - fullDump - (default= ``True``) - dump results as list followed by results names in nested outline;
+              if False, only dump nested list
+         - printResults - (default= ``True``) prints test output to stdout
+         - failureTests - (default= ``False``) indicates if these tests are expected to fail parsing
+         - postParse - (default= ``None``) optional callback for successful parse results; called as
+              `fn(test_string, parse_results)` and returns a string to be added to the test output
+         - file - (default=``None``) optional file-like object to which test output will be written;
+              if None, will default to ``sys.stdout``
+
+        Returns: a (success, results) tuple, where success indicates that all tests succeeded
+        (or failed if ``failureTests`` is True), and the results contain a list of lines of each
+        test's output
+
+        Example::
+
+            number_expr = pyparsing_common.number.copy()
+
+            result = number_expr.runTests('''
+                # unsigned integer
+                100
+                # negative integer
+                -100
+                # float with scientific notation
+                6.02e23
+                # integer with scientific notation
+                1e-12
+                ''')
+            print("Success" if result[0] else "Failed!")
+
+            result = number_expr.runTests('''
+                # stray character
+                100Z
+                # missing leading digit before '.'
+                -.100
+                # too many '.'
+                3.14.159
+                ''', failureTests=True)
+            print("Success" if result[0] else "Failed!")
+
+        prints::
+
+            # unsigned integer
+            100
+            [100]
+
+            # negative integer
+            -100
+            [-100]
+
+            # float with scientific notation
+            6.02e23
+            [6.02e+23]
+
+            # integer with scientific notation
+            1e-12
+            [1e-12]
+
+            Success
+
+            # stray character
+            100Z
+               ^
+            FAIL: Expected end of text (at char 3), (line:1, col:4)
+
+            # missing leading digit before '.'
+            -.100
+            ^
+            FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1)
+
+            # too many '.'
+            3.14.159
+                ^
+            FAIL: Expected end of text (at char 4), (line:1, col:5)
+
+            Success
+
+        Each test string must be on a single line. If you want to test a string that spans multiple
+        lines, create a test like this::
+
+            expr.runTest(r"this is a test\n of strings that spans \n 3 lines")
+
+        (Note that this is a raw string literal, you must include the leading 'r'.)
+        """
+        if isinstance(tests, basestring):
+            tests = list(map(str.strip, tests.rstrip().splitlines()))
+        if isinstance(comment, basestring):
+            comment = Literal(comment)
+        if file is None:
+            file = sys.stdout
+        print_ = file.write
+        allResults = []
+        comments = []
+        success = True
+        NL = Literal("\\n").addParseAction(replaceWith("\n")).ignore(quotedString)
+        BOM = "\ufeff"
+        for t in tests:
+            if not (comment is not None and comment.matches(t, False)):
+                if comments:
+                    if not t:
+                        comments.append(t)
+                        continue
+                if not t:
+                    continue
+            out = [
+             "\n" + "\n".join(comments) if comments else "", t]
+            comments = []
+            try:
+                t = NL.transformString(t.lstrip(BOM))
+                result = self.parseString(t, parseAll=parseAll)
+            except ParseBaseException as pe:
+                try:
+                    fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else ""
+                    if "\n" in t:
+                        out.append(line(pe.loc, t))
+                        out.append(" " * (col(pe.loc, t) - 1) + "^" + fatal)
+                    else:
+                        out.append(" " * pe.loc + "^" + fatal)
+                    out.append("FAIL: " + str(pe))
+                    success = success and failureTests
+                    result = pe
+                finally:
+                    pe = None
+                    del pe
+
+            except Exception as exc:
+                try:
+                    out.append("FAIL-EXCEPTION: " + str(exc))
+                    success = success and failureTests
+                    result = exc
+                finally:
+                    exc = None
+                    del exc
+
+            else:
+                success = success and not failureTests
+                if postParse is not None:
+                    try:
+                        pp_value = postParse(t, result)
+                        if pp_value is not None:
+                            if isinstance(pp_value, ParseResults):
+                                out.append(pp_value.dump())
+                            else:
+                                out.append(str(pp_value))
+                        else:
+                            out.append(result.dump())
+                    except Exception as e:
+                        try:
+                            out.append(result.dump(full=fullDump))
+                            out.append("{0} failed: {1}: {2}".format(postParse.__name__, type(e).__name__, e))
+                        finally:
+                            e = None
+                            del e
+
+                else:
+                    out.append(result.dump(full=fullDump))
+            if printResults:
+                if fullDump:
+                    out.append("")
+                print_("\n".join(out))
+            allResults.append((t, result))
+
+        return (success, allResults)
+
+
class _PendingSkip(ParserElement):
    # Internal placeholder inserted when '...' is used in an expression; when a
    # following ParserElement is added, it expands into a SkipTo(other).

    def __init__(self, expr, must_skip=False):
        super(_PendingSkip, self).__init__()
        self.strRepr = str(expr + Empty()).replace("Empty", "...")
        self.name = self.strRepr
        self.anchor = expr
        self.must_skip = must_skip

    def __add__(self, other):
        skipper = SkipTo(other).setName("...")("_skipped*")
        if self.must_skip:

            def must_skip(t):
                # Drop the placeholder result when nothing was actually skipped.
                if not t._skipped or t._skipped.asList() == [""]:
                    del t[0]
                    t.pop("_skipped", None)

            def show_skip(t):
                # Replace an empty trailing skip with an explanatory marker.
                # BUGFIX: decompiler artifact `[(-1)[:None]]` (a TypeError at
                # runtime) restored to the tail slice `[-1:]`.
                if t._skipped.asList()[-1:] == [""]:
                    skipped = t.pop("_skipped")
                    t["_skipped"] = "missing <" + repr(self.anchor) + ">"

            return (self.anchor + skipper().addParseAction(must_skip) | skipper().addParseAction(show_skip)) + other
        return self.anchor + skipper + other

    def __repr__(self):
        return self.strRepr

    def parseImpl(self, *args):
        # A dangling '...' was never completed with a following expression.
        raise Exception("use of `...` expression without following SkipTo target expression")
+
+
class Token(ParserElement):
    """Abstract :class:`ParserElement` subclass, for defining atomic
    matching patterns.
    """

    def __init__(self):
        # Tokens produce scalar matches; no per-instance result list is kept.
        super(Token, self).__init__(savelist=False)
+
+
class Empty(Token):
    """An empty token, will always match.
    """

    def __init__(self):
        super(Empty, self).__init__()
        self.name = "Empty"
        # Matching consumes no input, so it can neither fail nor index past
        # the end of the string.
        self.mayIndexError = False
        self.mayReturnEmpty = True
+
+
class NoMatch(Token):
    """A token that will never match.
    """

    def __init__(self):
        super(NoMatch, self).__init__()
        self.name = "NoMatch"
        self.mayReturnEmpty = True
        self.mayIndexError = False
        self.errmsg = "Unmatchable token"

    def parseImpl(self, instring, loc, doActions=True):
        # Fail unconditionally at the current location.
        raise ParseException(instring, loc, self.errmsg, self)
+
+
class Literal(Token):
    __doc__ = 'Token to exactly match a specified string.\n\n    Example::\n\n        Literal(\'blah\').parseString(\'blah\')  # -> [\'blah\']\n        Literal(\'blah\').parseString(\'blahfooblah\')  # -> [\'blah\']\n        Literal(\'blah\').parseString(\'bla\')  # -> Exception: Expected "blah"\n\n    For case-insensitive matching, use :class:`CaselessLiteral`.\n\n    For keyword matching (force word break before and after the matched string),\n    use :class:`Keyword` or :class:`CaselessKeyword`.\n    '

    def __init__(self, matchString):
        super(Literal, self).__init__()
        self.match = matchString
        self.matchLen = len(matchString)
        try:
            self.firstMatchChar = matchString[0]
        except IndexError:
            warnings.warn("null string passed to Literal; use Empty() instead", SyntaxWarning,
              stacklevel=2)
            # An empty literal always matches; degrade this instance to Empty.
            self.__class__ = Empty

        self.name = '"%s"' % _ustr(self.match)
        self.errmsg = "Expected " + self.name
        self.mayReturnEmpty = False
        self.mayIndexError = False
        # Single-character literals get a specialized, faster parseImpl.
        if self.matchLen == 1 and type(self) is Literal:
            self.__class__ = _SingleCharLiteral

    def parseImpl(self, instring, loc, doActions=True):
        # Cheap first-character check before the full startswith comparison.
        if instring[loc] == self.firstMatchChar and instring.startswith(self.match, loc):
            return (loc + self.matchLen, self.match)
        raise ParseException(instring, loc, self.errmsg, self)
+
+
class _SingleCharLiteral(Literal):
    # Optimized Literal for one-character match strings; Literal.__init__
    # rebinds qualifying instances to this class.

    def parseImpl(self, instring, loc, doActions=True):
        if instring[loc] != self.firstMatchChar:
            raise ParseException(instring, loc, self.errmsg, self)
        return (loc + 1, self.match)
+
+
# Backward-compatible short alias for Literal, and register Literal as the
# class ParserElement uses when auto-converting plain strings into parser
# elements (e.g. `expr + "literal"`).
_L = Literal
ParserElement._literalStringClass = Literal
+
class Keyword(Token):
    __doc__ = 'Token to exactly match a specified string as a keyword, that is,\n    it must be immediately followed by a non-keyword character.  Compare\n    with :class:`Literal`:\n\n     - ``Literal("if")`` will match the leading ``\'if\'`` in\n       ``\'ifAndOnlyIf\'``.\n     - ``Keyword("if")`` will not; it will only match the leading\n       ``\'if\'`` in ``\'if x=1\'``, or ``\'if(y==2)\'``\n\n    Accepts two optional constructor arguments in addition to the\n    keyword string:\n\n     - ``identChars`` is a string of characters that would be valid\n       identifier characters, defaulting to all alphanumerics + "_" and\n       "$"\n     - ``caseless`` allows case-insensitive matching, default is ``False``.\n\n    Example::\n\n        Keyword("start").parseString("start")  # -> [\'start\']\n        Keyword("start").parseString("starting")  # -> Exception\n\n    For case-insensitive matching, use :class:`CaselessKeyword`.\n    '
    DEFAULT_KEYWORD_CHARS = alphanums + "_$"

    def __init__(self, matchString, identChars=None, caseless=False):
        super(Keyword, self).__init__()
        if identChars is None:
            identChars = Keyword.DEFAULT_KEYWORD_CHARS
        self.match = matchString
        self.matchLen = len(matchString)
        try:
            self.firstMatchChar = matchString[0]
        except IndexError:
            warnings.warn("null string passed to Keyword; use Empty() instead", SyntaxWarning,
              stacklevel=2)

        self.name = '"%s"' % self.match
        self.errmsg = "Expected " + self.name
        self.mayReturnEmpty = False
        self.mayIndexError = False
        self.caseless = caseless
        if caseless:
            # Normalize once so parseImpl can compare uppercased slices.
            self.caselessmatch = matchString.upper()
            identChars = identChars.upper()
        self.identChars = set(identChars)

    def parseImpl(self, instring, loc, doActions=True):
        # A keyword matches only when the text matches AND neither neighbor
        # character is a valid identifier character (word-boundary test).
        # BUGFIX: restored from decompiler output that (a) used the broken
        # slice `instring[loc[:loc+matchLen]]` (TypeError) and (b) inverted
        # the and/or structure so failed matches could still "succeed".
        if self.caseless:
            if ((instring[loc:loc + self.matchLen].upper() == self.caselessmatch)
                    and (loc >= len(instring) - self.matchLen
                         or instring[loc + self.matchLen].upper() not in self.identChars)
                    and (loc == 0
                         or instring[loc - 1].upper() not in self.identChars)):
                return (loc + self.matchLen, self.match)
        elif instring[loc] == self.firstMatchChar:
            if ((self.matchLen == 1 or instring.startswith(self.match, loc))
                    and (loc >= len(instring) - self.matchLen
                         or instring[loc + self.matchLen] not in self.identChars)
                    and (loc == 0 or instring[loc - 1] not in self.identChars)):
                return (loc + self.matchLen, self.match)
        raise ParseException(instring, loc, self.errmsg, self)

    def copy(self):
        # Copies revert to the class-wide default identifier character set.
        c = super(Keyword, self).copy()
        c.identChars = Keyword.DEFAULT_KEYWORD_CHARS
        return c

    @staticmethod
    def setDefaultKeywordChars(chars):
        """Overrides the default Keyword chars
        """
        Keyword.DEFAULT_KEYWORD_CHARS = chars
+
+
class CaselessLiteral(Literal):
    __doc__ = 'Token to match a specified string, ignoring case of letters.\n    Note: the matched results will always be in the case of the given\n    match string, NOT the case of the input text.\n\n    Example::\n\n        OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> [\'CMD\', \'CMD\', \'CMD\']\n\n    (Contrast with example for :class:`CaselessKeyword`.)\n    '

    def __init__(self, matchString):
        # Store the uppercased form for comparison, but return the original
        # casing from successful matches.
        super(CaselessLiteral, self).__init__(matchString.upper())
        self.returnString = matchString
        self.name = "'%s'" % self.returnString
        self.errmsg = "Expected " + self.name

    def parseImpl(self, instring, loc, doActions=True):
        # BUGFIX: decompiler artifact `instring[loc[:loc + self.matchLen]]`
        # (TypeError at runtime) restored to the slice `instring[loc:loc + matchLen]`.
        if instring[loc:loc + self.matchLen].upper() == self.match:
            return (loc + self.matchLen, self.returnString)
        raise ParseException(instring, loc, self.errmsg, self)
+
+
class CaselessKeyword(Keyword):
    """
    Caseless version of :class:`Keyword`.

    Example::

        OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD']

    (Contrast with example for :class:`CaselessLiteral`.)
    """

    def __init__(self, matchString, identChars=None):
        # Delegate to Keyword with caseless matching forced on.
        super(CaselessKeyword, self).__init__(matchString, identChars, caseless=True)
+
+
class CloseMatch(Token):
    __doc__ = 'A variation on :class:`Literal` which matches "close" matches,\n    that is, strings with at most \'n\' mismatching characters.\n    :class:`CloseMatch` takes parameters:\n\n     - ``match_string`` - string to be matched\n     - ``maxMismatches`` - (``default=1``) maximum number of\n       mismatches allowed to count as a match\n\n    The results from a successful parse will contain the matched text\n    from the input string and the following named results:\n\n     - ``mismatches`` - a list of the positions within the\n       match_string where mismatches were found\n     - ``original`` - the original match_string used to compare\n       against the input string\n\n    If ``mismatches`` is an empty list, then the match was an exact\n    match.\n\n    Example::\n\n        patt = CloseMatch("ATCATCGAATGGA")\n        patt.parseString("ATCATCGAAXGGA") # -> ([\'ATCATCGAAXGGA\'], {\'mismatches\': [[9]], \'original\': [\'ATCATCGAATGGA\']})\n        patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected \'ATCATCGAATGGA\' (with up to 1 mismatches) (at char 0), (line:1, col:1)\n\n        # exact match\n        patt.parseString("ATCATCGAATGGA") # -> ([\'ATCATCGAATGGA\'], {\'mismatches\': [[]], \'original\': [\'ATCATCGAATGGA\']})\n\n        # close match allowing up to 2 mismatches\n        patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)\n        patt.parseString("ATCAXCGAAXGGA") # -> ([\'ATCAXCGAAXGGA\'], {\'mismatches\': [[4, 9]], \'original\': [\'ATCATCGAATGGA\']})\n    '

    def __init__(self, match_string, maxMismatches=1):
        super(CloseMatch, self).__init__()
        self.name = match_string
        self.match_string = match_string
        self.maxMismatches = maxMismatches
        self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches)
        self.mayIndexError = False
        self.mayReturnEmpty = False

    def parseImpl(self, instring, loc, doActions=True):
        start = loc
        instrlen = len(instring)
        maxloc = start + len(self.match_string)
        if maxloc <= instrlen:
            match_string = self.match_string
            match_stringloc = 0
            mismatches = []
            maxMismatches = self.maxMismatches
            # Compare character by character, tolerating up to maxMismatches
            # differences; the for/else succeeds only when we never break.
            # BUGFIX: decompiler artifacts `instring[loc[:maxloc]]` and
            # `instring[start[:loc]]` restored to proper slices.
            for match_stringloc, s_m in enumerate(zip(instring[loc:maxloc], match_string)):
                src, mat = s_m
                if src != mat:
                    mismatches.append(match_stringloc)
                    if len(mismatches) > maxMismatches:
                        break
            else:
                loc = match_stringloc + 1
                results = ParseResults([instring[start:loc]])
                results["original"] = match_string
                results["mismatches"] = mismatches
                return (loc, results)

        raise ParseException(instring, loc, self.errmsg, self)
+
+
class Word(Token):
    """Token for matching words composed of allowed character sets.

    Defined with a string containing all allowed initial characters, an
    optional string of allowed body characters (if omitted, defaults to the
    initial character set), and optional ``min``, ``max``, and/or ``exact``
    length constraints.  ``min`` defaults to 1 (a minimum < 1 is invalid);
    ``max`` and ``exact`` default to 0, meaning no maximum or exact length
    restriction.  ``excludeChars`` removes characters from the allowed sets;
    ``asKeyword=True`` requires the word to be bounded by non-word characters.

    Note: ``Word`` defines *sets* of matchable characters — to match an exact
    literal string use :class:`Literal` or :class:`Keyword` instead.

    Example::

        integer = Word(nums)                           # word of digits
        capital_word = Word(alphas.upper(), alphas.lower())
        hostname = Word(alphas, alphanums + '-')
        csv_value = Word(printables, excludeChars=",")
    """

    def __init__(self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None):
        # BUGFIX: the decompiled version nested all attribute setup inside the
        # `excludeChars` branch and mis-chained the regex-optimization branch;
        # control flow restored so attributes are always initialized.
        super(Word, self).__init__()
        if excludeChars:
            excludeChars = set(excludeChars)
            initChars = "".join((c for c in initChars if c not in excludeChars))
            if bodyChars:
                bodyChars = "".join((c for c in bodyChars if c not in excludeChars))
        self.initCharsOrig = initChars
        self.initChars = set(initChars)
        if bodyChars:
            self.bodyCharsOrig = bodyChars
            self.bodyChars = set(bodyChars)
        else:
            self.bodyCharsOrig = initChars
            self.bodyChars = set(initChars)
        self.maxSpecified = max > 0
        if min < 1:
            raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted")
        self.minLen = min
        if max > 0:
            self.maxLen = max
        else:
            self.maxLen = _MAX_INT
        if exact > 0:
            self.maxLen = exact
            self.minLen = exact
        self.name = _ustr(self)
        self.errmsg = "Expected " + self.name
        self.mayIndexError = False
        self.asKeyword = asKeyword
        # For simple unconstrained Words, compile an equivalent regex and
        # switch this instance to the faster _WordRegex implementation.
        if " " not in self.initCharsOrig + self.bodyCharsOrig and (min == 1 and max == 0 and exact == 0):
            if self.bodyCharsOrig == self.initCharsOrig:
                self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig)
            elif len(self.initCharsOrig) == 1:
                self.reString = "%s[%s]*" % (re.escape(self.initCharsOrig),
                 _escapeRegexRangeChars(self.bodyCharsOrig))
            else:
                self.reString = "[%s][%s]*" % (_escapeRegexRangeChars(self.initCharsOrig),
                 _escapeRegexRangeChars(self.bodyCharsOrig))
            if self.asKeyword:
                self.reString = "\\b" + self.reString + "\\b"
            try:
                self.re = re.compile(self.reString)
            except Exception:
                self.re = None
            else:
                self.re_match = self.re.match
                self.__class__ = _WordRegex

    def parseImpl(self, instring, loc, doActions=True):
        if instring[loc] not in self.initChars:
            raise ParseException(instring, loc, self.errmsg, self)
        start = loc
        loc += 1
        instrlen = len(instring)
        bodychars = self.bodyChars
        maxloc = start + self.maxLen
        maxloc = min(maxloc, instrlen)
        while loc < maxloc and instring[loc] in bodychars:
            loc += 1

        throwException = False
        if loc - start < self.minLen:
            throwException = True
        elif self.maxSpecified and loc < instrlen and instring[loc] in bodychars:
            throwException = True
        elif self.asKeyword:
            # BUGFIX: the decompiled guard could index past the end of the
            # string and skipped the leading-boundary failure; restored to
            # (bad leading boundary) or (bad trailing boundary).
            if (start > 0 and instring[start - 1] in bodychars
                    or loc < instrlen and instring[loc] in bodychars):
                throwException = True
        if throwException:
            raise ParseException(instring, loc, self.errmsg, self)
        # BUGFIX: `instring[start[:loc]]` artifact restored to a real slice.
        return (loc, instring[start:loc])

    def __str__(self):
        try:
            return super(Word, self).__str__()
        except Exception:
            pass

        if self.strRepr is None:

            def charsAsStr(s):
                # Abbreviate long character sets in the repr.
                # BUGFIX: `s[None[:4]]` artifact restored to `s[:4]`.
                if len(s) > 4:
                    return s[:4] + "..."
                return s

            if self.initCharsOrig != self.bodyCharsOrig:
                self.strRepr = "W:(%s, %s)" % (charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig))
            else:
                self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig)
        return self.strRepr
+
+
class _WordRegex(Word):
    # Word variant whose character-set scan has been compiled into a regex;
    # Word.__init__ rebinds qualifying instances to this class.

    def parseImpl(self, instring, loc, doActions=True):
        m = self.re_match(instring, loc)
        if m:
            return (m.end(), m.group())
        raise ParseException(instring, loc, self.errmsg, self)
+
+
class Char(_WordRegex):
    """A short-cut class for defining ``Word(characters, exact=1)``,
    when defining a match of any single character in a string of
    characters.
    """

    def __init__(self, charset, asKeyword=False, excludeChars=None):
        super(Char, self).__init__(charset, exact=1, asKeyword=asKeyword, excludeChars=excludeChars)
        # Replace the pattern Word generated with a single-character class.
        pattern = "[%s]" % _escapeRegexRangeChars("".join(self.initChars))
        if asKeyword:
            pattern = "\\b%s\\b" % pattern
        self.reString = pattern
        self.re = re.compile(pattern)
        self.re_match = self.re.match
+
+
+class Regex(Token):
+    __doc__ = 'Token for matching strings that match a given regular\n    expression. Defined with string specifying the regular expression in\n    a form recognized by the stdlib Python  `re module `_.\n    If the given regex contains named groups (defined using ``(?P...)``),\n    these will be preserved as named parse results.\n\n    If instead of the Python stdlib re module you wish to use a different RE module\n    (such as the `regex` module), you can replace it by either building your\n    Regex object with a compiled RE that was compiled using regex:\n\n    Example::\n\n        realnum = Regex(r"[+-]?\\d+\\.\\d*")\n        date = Regex(r\'(?P\\d{4})-(?P\\d\\d?)-(?P\\d\\d?)\')\n        # ref: https://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression\n        roman = Regex(r"M{0,4}(CM|CD|D?{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})")\n\n        # use regex module instead of stdlib re module to construct a Regex using\n        # a compiled regular expression\n        import regex\n        parser = pp.Regex(regex.compile(r\'[0-9]\'))\n\n    '
+
    def __init__(self, pattern, flags=0, asGroupList=False, asMatch=False):
        """The parameters ``pattern`` and ``flags`` are passed
        to the ``re.compile()`` function as-is. See the Python
        `re module `_ module for an
        explanation of the acceptable patterns and flags.
        """
        super(Regex, self).__init__()
        if isinstance(pattern, basestring):
            if not pattern:
                # Warn but continue; re.compile("") is legal and always matches.
                warnings.warn("null string passed to Regex; use Empty() instead", SyntaxWarning,
                  stacklevel=2)
            self.pattern = pattern
            self.flags = flags
            try:
                self.re = re.compile(self.pattern, self.flags)
                self.reString = self.pattern
            except sre_constants.error:
                # Surface the bad pattern to the caller, then re-raise.
                warnings.warn(("invalid pattern (%s) passed to Regex" % pattern), SyntaxWarning,
                  stacklevel=2)
                raise

        else:
            # Accept anything that quacks like a precompiled regex object
            # (e.g. from the third-party `regex` module).
            if hasattr(pattern, "pattern") and hasattr(pattern, "match"):
                self.re = pattern
                self.pattern = self.reString = pattern.pattern
                self.flags = flags
            else:
                raise TypeError("Regex may only be constructed with a string or a compiled RE object")
        self.re_match = self.re.match
        self.name = _ustr(self)
        self.errmsg = "Expected " + self.name
        self.mayIndexError = False
        # A pattern that matches the empty string can succeed without input.
        self.mayReturnEmpty = self.re_match("") is not None
        self.asGroupList = asGroupList
        self.asMatch = asMatch
        # Swap in the requested parseImpl variant (tuple of groups, or the
        # raw match object, instead of a ParseResults).
        if self.asGroupList:
            self.parseImpl = self.parseImplAsGroupList
        if self.asMatch:
            self.parseImpl = self.parseImplAsMatch
+
+    def parseImpl(self, instring, loc, doActions=True):
+        result = self.re_match(instring, loc)
+        if not result:
+            raise ParseException(instring, loc, self.errmsg, self)
+        loc = result.end()
+        ret = ParseResults(result.group())
+        d = result.groupdict()
+        if d:
+            for k, v in d.items():
+                ret[k] = v
+
+        return (
+         loc, ret)
+
+    def parseImplAsGroupList(self, instring, loc, doActions=True):
+        result = self.re_match(instring, loc)
+        if not result:
+            raise ParseException(instring, loc, self.errmsg, self)
+        loc = result.end()
+        ret = result.groups()
+        return (loc, ret)
+
+    def parseImplAsMatch(self, instring, loc, doActions=True):
+        result = self.re_match(instring, loc)
+        if not result:
+            raise ParseException(instring, loc, self.errmsg, self)
+        loc = result.end()
+        ret = result
+        return (loc, ret)
+
    def __str__(self):
        # Prefer the explicit name set via setName(), if the base class has one.
        try:
            return super(Regex, self).__str__()
        except Exception:
            pass

        # Otherwise build and cache a "Re:(...)" description of the pattern.
        if self.strRepr is None:
            self.strRepr = "Re:(%s)" % repr(self.pattern)
        return self.strRepr
+
+    def sub(self, repl):
+        r"""
+        Return Regex with an attached parse action to transform the parsed
+        result as if called using `re.sub(expr, repl, string) `_.
+
+        Example::
+
+            make_html = Regex(r"(\w+):(.*?):").sub(r"<\1>\2")
+            print(make_html.transformString("h1:main title:"))
+            # prints "

main title

" + """ + if self.asGroupList: + warnings.warn("cannot use sub() with Regex(asGroupList=True)", SyntaxWarning, + stacklevel=2) + raise SyntaxError() + elif self.asMatch: + if callable(repl): + warnings.warn("cannot use sub() with a callable with Regex(asMatch=True)", SyntaxWarning, + stacklevel=2) + raise SyntaxError() + if self.asMatch: + + def pa(tokens): + return tokens[0].expand(repl) + + else: + + def pa(tokens): + return self.re.sub(repl, tokens[0]) + + return self.addParseAction(pa) + + +class QuotedString(Token): + __doc__ = '\n Token for matching strings that are delimited by quoting characters.\n\n Defined with the following parameters:\n\n - quoteChar - string of one or more characters defining the\n quote delimiting string\n - escChar - character to escape quotes, typically backslash\n (default= ``None``)\n - escQuote - special quote sequence to escape an embedded quote\n string (such as SQL\'s ``""`` to escape an embedded ``"``)\n (default= ``None``)\n - multiline - boolean indicating whether quotes can span\n multiple lines (default= ``False``)\n - unquoteResults - boolean indicating whether the matched text\n should be unquoted (default= ``True``)\n - endQuoteChar - string of one or more characters defining the\n end of the quote delimited string (default= ``None`` => same as\n quoteChar)\n - convertWhitespaceEscapes - convert escaped whitespace\n (``\'\\t\'``, ``\'\\n\'``, etc.) 
to actual whitespace\n (default= ``True``)\n\n Example::\n\n qs = QuotedString(\'"\')\n print(qs.searchString(\'lsjdf "This is the quote" sldjf\'))\n complex_qs = QuotedString(\'{{\', endQuoteChar=\'}}\')\n print(complex_qs.searchString(\'lsjdf {{This is the "quote"}} sldjf\'))\n sql_qs = QuotedString(\'"\', escQuote=\'""\')\n print(sql_qs.searchString(\'lsjdf "This is the quote with ""embedded"" quotes" sldjf\'))\n\n prints::\n\n [[\'This is the quote\']]\n [[\'This is the "quote"\']]\n [[\'This is the quote with "embedded" quotes\']]\n ' + + def __init__(self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True): + super(QuotedString, self).__init__() + quoteChar = quoteChar.strip() + if not quoteChar: + warnings.warn("quoteChar cannot be the empty string", SyntaxWarning, stacklevel=2) + raise SyntaxError() + if endQuoteChar is None: + endQuoteChar = quoteChar + else: + endQuoteChar = endQuoteChar.strip() + if not endQuoteChar: + warnings.warn("endQuoteChar cannot be the empty string", SyntaxWarning, stacklevel=2) + raise SyntaxError() + else: + self.quoteChar = quoteChar + self.quoteCharLen = len(quoteChar) + self.firstQuoteChar = quoteChar[0] + self.endQuoteChar = endQuoteChar + self.endQuoteCharLen = len(endQuoteChar) + self.escChar = escChar + self.escQuote = escQuote + self.unquoteResults = unquoteResults + self.convertWhitespaceEscapes = convertWhitespaceEscapes + if multiline: + self.flags = re.MULTILINE | re.DOTALL + self.pattern = "%s(?:[^%s%s]" % (re.escape(self.quoteChar), + _escapeRegexRangeChars(self.endQuoteChar[0]), + escChar is not None and _escapeRegexRangeChars(escChar) or "") + else: + self.flags = 0 + self.pattern = "%s(?:[^%s\\n\\r%s]" % (re.escape(self.quoteChar), + _escapeRegexRangeChars(self.endQuoteChar[0]), + escChar is not None and _escapeRegexRangeChars(escChar) or "") + if len(self.endQuoteChar) > 1: + self.pattern += "|(?:" + ")|(?:".join(("%s[^%s]" % 
(re.escape(self.endQuoteChar[None[:i]]), _escapeRegexRangeChars(self.endQuoteChar[i])) for i in range(len(self.endQuoteChar) - 1, 0, -1))) + ")" + if escQuote: + self.pattern += "|(?:%s)" % re.escape(escQuote) + if escChar: + self.pattern += "|(?:%s.)" % re.escape(escChar) + self.escCharReplacePattern = re.escape(self.escChar) + "(.)" + self.pattern += ")*%s" % re.escape(self.endQuoteChar) + try: + self.re = re.compile(self.pattern, self.flags) + self.reString = self.pattern + self.re_match = self.re.match + except sre_constants.error: + warnings.warn(("invalid pattern (%s) passed to Regex" % self.pattern), SyntaxWarning, + stacklevel=2) + raise + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayIndexError = False + self.mayReturnEmpty = True + + def parseImpl(self, instring, loc, doActions=True): + result = instring[loc] == self.firstQuoteChar and self.re_match(instring, loc) or None + if not result: + raise ParseException(instring, loc, self.errmsg, self) + loc = result.end() + ret = result.group() + if self.unquoteResults: + ret = ret[self.quoteCharLen[:-self.endQuoteCharLen]] + if isinstance(ret, basestring): + if "\\" in ret: + if self.convertWhitespaceEscapes: + ws_map = { + '\\t': '"\\t"', + '\\n': '"\\n"', + '\\f': '"\\x0c"', + '\\r': '"\\r"'} + for wslit, wschar in ws_map.items(): + ret = ret.replace(wslit, wschar) + + if self.escChar: + ret = re.sub(self.escCharReplacePattern, "\\g<1>", ret) + if self.escQuote: + ret = ret.replace(self.escQuote, self.endQuoteChar) + return ( + loc, ret) + + def __str__(self): + try: + return super(QuotedString, self).__str__() + except Exception: + pass + + if self.strRepr is None: + self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar) + return self.strRepr + + +class CharsNotIn(Token): + __doc__ = 'Token for matching words composed of characters *not* in a given\n set (will include whitespace in matched characters if not listed in\n the provided 
exclusion set - see example). Defined with string\n containing all disallowed characters, and an optional minimum,\n maximum, and/or exact length. The default value for ``min`` is\n 1 (a minimum value < 1 is not valid); the default values for\n ``max`` and ``exact`` are 0, meaning no maximum or exact\n length restriction.\n\n Example::\n\n # define a comma-separated-value as anything that is not a \',\'\n csv_value = CharsNotIn(\',\')\n print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213"))\n\n prints::\n\n [\'dkls\', \'lsdkjf\', \'s12 34\', \'@!#\', \'213\']\n ' + + def __init__(self, notChars, min=1, max=0, exact=0): + super(CharsNotIn, self).__init__() + self.skipWhitespace = False + self.notChars = notChars + if min < 1: + raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted") + else: + self.minLen = min + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + if exact > 0: + self.maxLen = exact + self.minLen = exact + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = self.minLen == 0 + self.mayIndexError = False + + def parseImpl(self, instring, loc, doActions=True): + if instring[loc] in self.notChars: + raise ParseException(instring, loc, self.errmsg, self) + start = loc + loc += 1 + notchars = self.notChars + maxlen = min(start + self.maxLen, len(instring)) + while loc < maxlen and instring[loc] not in notchars: + loc += 1 + + if loc - start < self.minLen: + raise ParseException(instring, loc, self.errmsg, self) + return (loc, instring[start[:loc]]) + + def __str__(self): + try: + return super(CharsNotIn, self).__str__() + except Exception: + pass + + if self.strRepr is None: + if len(self.notChars) > 4: + self.strRepr = "!W:(%s...)" % self.notChars[None[:4]] + else: + self.strRepr = "!W:(%s)" % self.notChars + return self.strRepr + + +class White(Token): + __doc__ = 'Special matching class for matching whitespace. 
Normally,\n whitespace is ignored by pyparsing grammars. This class is included\n when some whitespace structures are significant. Define with\n a string containing the whitespace characters to be matched; default\n is ``" \\t\\r\\n"``. Also takes optional ``min``,\n ``max``, and ``exact`` arguments, as defined for the\n :class:`Word` class.\n ' + whiteStrs = { + ' ': '""', + '\t': '""', + '\n': '""', + '\r': '""', + '\x0c': '""', + '\xa0': '""', + '\u1680': '""', + '\u180e': '""', + '\u2000': '""', + '\u2001': '""', + '\u2002': '""', + '\u2003': '""', + '\u2004': '""', + '\u2005': '""', + '\u2006': '""', + '\u2007': '""', + '\u2008': '""', + '\u2009': '""', + '\u200a': '""', + '\u200b': '""', + '\u202f': '""', + '\u205f': '""', + '\u3000': '""'} + + def __init__(self, ws=' \t\r\n', min=1, max=0, exact=0): + super(White, self).__init__() + self.matchWhite = ws + self.setWhitespaceChars("".join((c for c in self.whiteChars if c not in self.matchWhite))) + self.name = "".join((White.whiteStrs[c] for c in self.matchWhite)) + self.mayReturnEmpty = True + self.errmsg = "Expected " + self.name + self.minLen = min + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + if exact > 0: + self.maxLen = exact + self.minLen = exact + + def parseImpl(self, instring, loc, doActions=True): + if instring[loc] not in self.matchWhite: + raise ParseException(instring, loc, self.errmsg, self) + start = loc + loc += 1 + maxloc = start + self.maxLen + maxloc = min(maxloc, len(instring)) + while loc < maxloc and instring[loc] in self.matchWhite: + loc += 1 + + if loc - start < self.minLen: + raise ParseException(instring, loc, self.errmsg, self) + return (loc, instring[start[:loc]]) + + +class _PositionToken(Token): + + def __init__(self): + super(_PositionToken, self).__init__() + self.name = self.__class__.__name__ + self.mayReturnEmpty = True + self.mayIndexError = False + + +class GoToColumn(_PositionToken): + __doc__ = "Token to advance to a specific column of input text; 
useful for\n tabular report scraping.\n " + + def __init__(self, colno): + super(GoToColumn, self).__init__() + self.col = colno + + def preParse(self, instring, loc): + if col(loc, instring) != self.col: + instrlen = len(instring) + if self.ignoreExprs: + loc = self._skipIgnorables(instring, loc) + while loc < instrlen and instring[loc].isspace() and col(loc, instring) != self.col: + loc += 1 + + return loc + + def parseImpl(self, instring, loc, doActions=True): + thiscol = col(loc, instring) + if thiscol > self.col: + raise ParseException(instring, loc, "Text not in expected column", self) + newloc = loc + self.col - thiscol + ret = instring[loc[:newloc]] + return (newloc, ret) + + +class LineStart(_PositionToken): + __doc__ = "Matches if current position is at the beginning of a line within\n the parse string\n\n Example::\n\n test = '''\\\n AAA this line\n AAA and this line\n AAA but not this one\n B AAA and definitely not this one\n '''\n\n for t in (LineStart() + 'AAA' + restOfLine).searchString(test):\n print(t)\n\n prints::\n\n ['AAA', ' this line']\n ['AAA', ' and this line']\n\n " + + def __init__(self): + super(LineStart, self).__init__() + self.errmsg = "Expected start of line" + + def parseImpl(self, instring, loc, doActions=True): + if col(loc, instring) == 1: + return ( + loc, []) + raise ParseException(instring, loc, self.errmsg, self) + + +class LineEnd(_PositionToken): + __doc__ = "Matches if current position is at the end of a line within the\n parse string\n " + + def __init__(self): + super(LineEnd, self).__init__() + self.setWhitespaceChars(ParserElement.DEFAULT_WHITE_CHARS.replace("\n", "")) + self.errmsg = "Expected end of line" + + def parseImpl(self, instring, loc, doActions=True): + if loc < len(instring): + if instring[loc] == "\n": + return ( + loc + 1, "\n") + raise ParseException(instring, loc, self.errmsg, self) + else: + if loc == len(instring): + return ( + loc + 1, []) + raise ParseException(instring, loc, self.errmsg, self) + + 
+class StringStart(_PositionToken): + __doc__ = "Matches if current position is at the beginning of the parse\n string\n " + + def __init__(self): + super(StringStart, self).__init__() + self.errmsg = "Expected start of text" + + def parseImpl(self, instring, loc, doActions=True): + if loc != 0: + if loc != self.preParse(instring, 0): + raise ParseException(instring, loc, self.errmsg, self) + return ( + loc, []) + + +class StringEnd(_PositionToken): + __doc__ = "Matches if current position is at the end of the parse string\n " + + def __init__(self): + super(StringEnd, self).__init__() + self.errmsg = "Expected end of text" + + def parseImpl(self, instring, loc, doActions=True): + if loc < len(instring): + raise ParseException(instring, loc, self.errmsg, self) + else: + if loc == len(instring): + return ( + loc + 1, []) + if loc > len(instring): + return ( + loc, []) + raise ParseException(instring, loc, self.errmsg, self) + + +class WordStart(_PositionToken): + __doc__ = "Matches if the current position is at the beginning of a Word,\n and is not preceded by any character in a given set of\n ``wordChars`` (default= ``printables``). To emulate the\n ``\x08`` behavior of regular expressions, use\n ``WordStart(alphanums)``. ``WordStart`` will also match at\n the beginning of the string being parsed, or at the beginning of\n a line.\n " + + def __init__(self, wordChars=printables): + super(WordStart, self).__init__() + self.wordChars = set(wordChars) + self.errmsg = "Not at the start of a word" + + def parseImpl(self, instring, loc, doActions=True): + if loc != 0: + if instring[loc - 1] in self.wordChars or instring[loc] not in self.wordChars: + raise ParseException(instring, loc, self.errmsg, self) + return ( + loc, []) + + +class WordEnd(_PositionToken): + __doc__ = "Matches if the current position is at the end of a Word, and is\n not followed by any character in a given set of ``wordChars``\n (default= ``printables``). 
To emulate the ``\x08`` behavior of\n regular expressions, use ``WordEnd(alphanums)``. ``WordEnd``\n will also match at the end of the string being parsed, or at the end\n of a line.\n " + + def __init__(self, wordChars=printables): + super(WordEnd, self).__init__() + self.wordChars = set(wordChars) + self.skipWhitespace = False + self.errmsg = "Not at the end of a word" + + def parseImpl(self, instring, loc, doActions=True): + instrlen = len(instring) + if instrlen > 0: + if loc < instrlen: + if instring[loc] in self.wordChars or instring[loc - 1] not in self.wordChars: + raise ParseException(instring, loc, self.errmsg, self) + return ( + loc, []) + + +class ParseExpression(ParserElement): + __doc__ = "Abstract subclass of ParserElement, for combining and\n post-processing parsed tokens.\n " + + def __init__(self, exprs, savelist=False): + super(ParseExpression, self).__init__(savelist) + if isinstance(exprs, _generatorType): + exprs = list(exprs) + if isinstance(exprs, basestring): + self.exprs = [ + self._literalStringClass(exprs)] + else: + if isinstance(exprs, ParserElement): + self.exprs = [ + exprs] + else: + if isinstance(exprs, Iterable): + exprs = list(exprs) + if any((isinstance(expr, basestring) for expr in exprs)): + exprs = (self._literalStringClass(e) if isinstance(e, basestring) else e for e in exprs) + self.exprs = list(exprs) + else: + try: + self.exprs = list(exprs) + except TypeError: + self.exprs = [ + exprs] + + self.callPreparse = False + + def append(self, other): + self.exprs.append(other) + self.strRepr = None + return self + + def leaveWhitespace(self): + """Extends ``leaveWhitespace`` defined in base class, and also invokes ``leaveWhitespace`` on + all contained expressions.""" + self.skipWhitespace = False + self.exprs = [e.copy() for e in self.exprs] + for e in self.exprs: + e.leaveWhitespace() + + return self + + def ignore(self, other): + if isinstance(other, Suppress): + if other not in self.ignoreExprs: + super(ParseExpression, 
self).ignore(other) + for e in self.exprs: + e.ignore(self.ignoreExprs[-1]) + + else: + super(ParseExpression, self).ignore(other) + for e in self.exprs: + e.ignore(self.ignoreExprs[-1]) + + return self + + def __str__(self): + try: + return super(ParseExpression, self).__str__() + except Exception: + pass + + if self.strRepr is None: + self.strRepr = "%s:(%s)" % (self.__class__.__name__, _ustr(self.exprs)) + return self.strRepr + + def streamline(self): + super(ParseExpression, self).streamline() + for e in self.exprs: + e.streamline() + + if len(self.exprs) == 2: + other = self.exprs[0] + if isinstance(other, self.__class__): + if not other.parseAction: + if other.resultsName is None: + if not other.debug: + self.exprs = other.exprs[None[:None]] + [self.exprs[1]] + self.strRepr = None + self.mayReturnEmpty |= other.mayReturnEmpty + self.mayIndexError |= other.mayIndexError + other = self.exprs[-1] + if isinstance(other, self.__class__) and not other.parseAction: + if other.resultsName is None: + if not other.debug: + self.exprs = self.exprs[None[:-1]] + other.exprs[None[:None]] + self.strRepr = None + self.mayReturnEmpty |= other.mayReturnEmpty + self.mayIndexError |= other.mayIndexError + self.errmsg = "Expected " + _ustr(self) + return self + + def validate(self, validateTrace=None): + tmp = (validateTrace if validateTrace is not None else [])[None[:None]] + [self] + for e in self.exprs: + e.validate(tmp) + + self.checkRecursion([]) + + def copy(self): + ret = super(ParseExpression, self).copy() + ret.exprs = [e.copy() for e in self.exprs] + return ret + + def _setResultsName(self, name, listAllMatches=False): + if __diag__.warn_ungrouped_named_tokens_in_collection: + for e in self.exprs: + if isinstance(e, ParserElement) and e.resultsName: + warnings.warn(("{0}: setting results name {1!r} on {2} expression collides with {3!r} on contained expression".format("warn_ungrouped_named_tokens_in_collection", name, type(self).__name__, e.resultsName)), + stacklevel=3) 
+ + return super(ParseExpression, self)._setResultsName(name, listAllMatches) + + +class And(ParseExpression): + __doc__ = '\n Requires all given :class:`ParseExpression` s to be found in the given order.\n Expressions may be separated by whitespace.\n May be constructed using the ``\'+\'`` operator.\n May also be constructed using the ``\'-\'`` operator, which will\n suppress backtracking.\n\n Example::\n\n integer = Word(nums)\n name_expr = OneOrMore(Word(alphas))\n\n expr = And([integer("id"), name_expr("name"), integer("age")])\n # more easily written as:\n expr = integer("id") + name_expr("name") + integer("age")\n ' + + class _ErrorStop(Empty): + + def __init__(self, *args, **kwargs): + (super(And._ErrorStop, self).__init__)(*args, **kwargs) + self.name = "-" + self.leaveWhitespace() + + def __init__(self, exprs, savelist=True): + exprs = list(exprs) + if exprs: + if Ellipsis in exprs: + tmp = [] + for i, expr in enumerate(exprs): + if expr is Ellipsis: + if i < len(exprs) - 1: + skipto_arg = (Empty() + exprs[i + 1]).exprs[-1] + tmp.append(SkipTo(skipto_arg)("_skipped*")) + else: + raise Exception("cannot construct And with sequence ending in ...") + else: + tmp.append(expr) + + exprs[None[:None]] = tmp + super(And, self).__init__(exprs, savelist) + self.mayReturnEmpty = all((e.mayReturnEmpty for e in self.exprs)) + self.setWhitespaceChars(self.exprs[0].whiteChars) + self.skipWhitespace = self.exprs[0].skipWhitespace + self.callPreparse = True + + def streamline(self): + if self.exprs: + if any((isinstance(e, ParseExpression) and e.exprs and isinstance(e.exprs[-1], _PendingSkip) for e in self.exprs[None[:-1]])): + for i, e in enumerate(self.exprs[None[:-1]]): + if e is None: + continue + if isinstance(e, ParseExpression) and e.exprs and isinstance(e.exprs[-1], _PendingSkip): + e.exprs[-1] = e.exprs[-1] + self.exprs[i + 1] + self.exprs[i + 1] = None + + self.exprs = [e for e in self.exprs if e is not None] + super(And, self).streamline() + self.mayReturnEmpty 
= all((e.mayReturnEmpty for e in self.exprs)) + return self + + def parseImpl(self, instring, loc, doActions=True): + loc, resultlist = self.exprs[0]._parse(instring, loc, doActions, callPreParse=False) + errorStop = False + for e in self.exprs[1[:None]]: + if isinstance(e, And._ErrorStop): + errorStop = True + continue + if errorStop: + try: + loc, exprtokens = e._parse(instring, loc, doActions) + except ParseSyntaxException: + raise + except ParseBaseException as pe: + try: + pe.__traceback__ = None + raise ParseSyntaxException._from_exception(pe) + finally: + pe = None + del pe + + except IndexError: + raise ParseSyntaxException(instring, len(instring), self.errmsg, self) + + else: + loc, exprtokens = e._parse(instring, loc, doActions) + if exprtokens or exprtokens.haskeys(): + resultlist += exprtokens + + return ( + loc, resultlist) + + def __iadd__(self, other): + if isinstance(other, basestring): + other = self._literalStringClass(other) + return self.append(other) + + def checkRecursion(self, parseElementList): + subRecCheckList = parseElementList[None[:None]] + [self] + for e in self.exprs: + e.checkRecursion(subRecCheckList) + if not e.mayReturnEmpty: + break + + def __str__(self): + if hasattr(self, "name"): + return self.name + if self.strRepr is None: + self.strRepr = "{" + " ".join((_ustr(e) for e in self.exprs)) + "}" + return self.strRepr + + +class Or(ParseExpression): + __doc__ = 'Requires that at least one :class:`ParseExpression` is found. If\n two expressions match, the expression that matches the longest\n string will be used. 
May be constructed using the ``\'^\'``\n operator.\n\n Example::\n\n # construct Or using \'^\' operator\n\n number = Word(nums) ^ Combine(Word(nums) + \'.\' + Word(nums))\n print(number.searchString("123 3.1416 789"))\n\n prints::\n\n [[\'123\'], [\'3.1416\'], [\'789\']]\n ' + + def __init__(self, exprs, savelist=False): + super(Or, self).__init__(exprs, savelist) + if self.exprs: + self.mayReturnEmpty = any((e.mayReturnEmpty for e in self.exprs)) + else: + self.mayReturnEmpty = True + + def streamline(self): + super(Or, self).streamline() + if __compat__.collect_all_And_tokens: + self.saveAsList = any((e.saveAsList for e in self.exprs)) + return self + + def parseImpl(self, instring, loc, doActions=True): + maxExcLoc = -1 + maxException = None + matches = [] + for e in self.exprs: + try: + loc2 = e.tryParse(instring, loc) + except ParseException as err: + try: + err.__traceback__ = None + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + finally: + err = None + del err + + except IndexError: + if len(instring) > maxExcLoc: + maxException = ParseException(instring, len(instring), e.errmsg, self) + maxExcLoc = len(instring) + else: + matches.append((loc2, e)) + + if matches: + matches.sort(key=(itemgetter(0)), reverse=True) + if not doActions: + best_expr = matches[0][1] + return best_expr._parse(instring, loc, doActions) + else: + longest = (-1, None) + for loc1, expr1 in matches: + if loc1 <= longest[0]: + return longest + try: + loc2, toks = expr1._parse(instring, loc, doActions) + except ParseException as err: + try: + err.__traceback__ = None + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + finally: + err = None + del err + + else: + if loc2 >= loc1: + return ( + loc2, toks) + if loc2 > longest[0]: + longest = ( + loc2, toks) + + if longest != (-1, None): + return longest + if maxException is not None: + maxException.msg = self.errmsg + raise maxException + else: + raise ParseException(instring, loc, "no defined 
alternatives to match", self) + + def __ixor__(self, other): + if isinstance(other, basestring): + other = self._literalStringClass(other) + return self.append(other) + + def __str__(self): + if hasattr(self, "name"): + return self.name + if self.strRepr is None: + self.strRepr = "{" + " ^ ".join((_ustr(e) for e in self.exprs)) + "}" + return self.strRepr + + def checkRecursion(self, parseElementList): + subRecCheckList = parseElementList[None[:None]] + [self] + for e in self.exprs: + e.checkRecursion(subRecCheckList) + + def _setResultsName(self, name, listAllMatches=False): + if not __compat__.collect_all_And_tokens: + if __diag__.warn_multiple_tokens_in_named_alternation: + if any((isinstance(e, And) for e in self.exprs)): + warnings.warn(("{0}: setting results name {1!r} on {2} expression may only return a single token for an And alternative, in future will return the full list of tokens".format("warn_multiple_tokens_in_named_alternation", name, type(self).__name__)), + stacklevel=3) + return super(Or, self)._setResultsName(name, listAllMatches) + + +class MatchFirst(ParseExpression): + __doc__ = 'Requires that at least one :class:`ParseExpression` is found. If\n two expressions match, the first one listed is the one that will\n match. May be constructed using the ``\'|\'`` operator.\n\n Example::\n\n # construct MatchFirst using \'|\' operator\n\n # watch the order of expressions to match\n number = Word(nums) | Combine(Word(nums) + \'.\' + Word(nums))\n print(number.searchString("123 3.1416 789")) # Fail! 
-> [[\'123\'], [\'3\'], [\'1416\'], [\'789\']]\n\n # put more selective expression first\n number = Combine(Word(nums) + \'.\' + Word(nums)) | Word(nums)\n print(number.searchString("123 3.1416 789")) # Better -> [[\'123\'], [\'3.1416\'], [\'789\']]\n ' + + def __init__(self, exprs, savelist=False): + super(MatchFirst, self).__init__(exprs, savelist) + if self.exprs: + self.mayReturnEmpty = any((e.mayReturnEmpty for e in self.exprs)) + else: + self.mayReturnEmpty = True + + def streamline(self): + super(MatchFirst, self).streamline() + if __compat__.collect_all_And_tokens: + self.saveAsList = any((e.saveAsList for e in self.exprs)) + return self + + def parseImpl(self, instring, loc, doActions=True): + maxExcLoc = -1 + maxException = None + for e in self.exprs: + try: + ret = e._parse(instring, loc, doActions) + return ret + except ParseException as err: + try: + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + finally: + err = None + del err + + except IndexError: + if len(instring) > maxExcLoc: + maxException = ParseException(instring, len(instring), e.errmsg, self) + maxExcLoc = len(instring) + + else: + if maxException is not None: + maxException.msg = self.errmsg + raise maxException + else: + raise ParseException(instring, loc, "no defined alternatives to match", self) + + def __ior__(self, other): + if isinstance(other, basestring): + other = self._literalStringClass(other) + return self.append(other) + + def __str__(self): + if hasattr(self, "name"): + return self.name + if self.strRepr is None: + self.strRepr = "{" + " | ".join((_ustr(e) for e in self.exprs)) + "}" + return self.strRepr + + def checkRecursion(self, parseElementList): + subRecCheckList = parseElementList[None[:None]] + [self] + for e in self.exprs: + e.checkRecursion(subRecCheckList) + + def _setResultsName(self, name, listAllMatches=False): + if not __compat__.collect_all_And_tokens: + if __diag__.warn_multiple_tokens_in_named_alternation: + if any((isinstance(e, And) 
for e in self.exprs)): + warnings.warn(("{0}: setting results name {1!r} on {2} expression may only return a single token for an And alternative, in future will return the full list of tokens".format("warn_multiple_tokens_in_named_alternation", name, type(self).__name__)), + stacklevel=3) + return super(MatchFirst, self)._setResultsName(name, listAllMatches) + + +class Each(ParseExpression): + __doc__ = 'Requires all given :class:`ParseExpression` s to be found, but in\n any order. Expressions may be separated by whitespace.\n\n May be constructed using the ``\'&\'`` operator.\n\n Example::\n\n color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN")\n shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON")\n integer = Word(nums)\n shape_attr = "shape:" + shape_type("shape")\n posn_attr = "posn:" + Group(integer("x") + \',\' + integer("y"))("posn")\n color_attr = "color:" + color("color")\n size_attr = "size:" + integer("size")\n\n # use Each (using operator \'&\') to accept attributes in any order\n # (shape and posn are required, color and size are optional)\n shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr)\n\n shape_spec.runTests(\'\'\'\n shape: SQUARE color: BLACK posn: 100, 120\n shape: CIRCLE size: 50 color: BLUE posn: 50,80\n color:GREEN size:20 shape:TRIANGLE posn:20,40\n \'\'\'\n )\n\n prints::\n\n shape: SQUARE color: BLACK posn: 100, 120\n [\'shape:\', \'SQUARE\', \'color:\', \'BLACK\', \'posn:\', [\'100\', \',\', \'120\']]\n - color: BLACK\n - posn: [\'100\', \',\', \'120\']\n - x: 100\n - y: 120\n - shape: SQUARE\n\n\n shape: CIRCLE size: 50 color: BLUE posn: 50,80\n [\'shape:\', \'CIRCLE\', \'size:\', \'50\', \'color:\', \'BLUE\', \'posn:\', [\'50\', \',\', \'80\']]\n - color: BLUE\n - posn: [\'50\', \',\', \'80\']\n - x: 50\n - y: 80\n - shape: CIRCLE\n - size: 50\n\n\n color: GREEN size: 20 shape: TRIANGLE posn: 20,40\n [\'color:\', \'GREEN\', \'size:\', \'20\', \'shape:\', \'TRIANGLE\', 
\'posn:\', [\'20\', \',\', \'40\']]\n - color: GREEN\n - posn: [\'20\', \',\', \'40\']\n - x: 20\n - y: 40\n - shape: TRIANGLE\n - size: 20\n ' + + def __init__(self, exprs, savelist=True): + super(Each, self).__init__(exprs, savelist) + self.mayReturnEmpty = all((e.mayReturnEmpty for e in self.exprs)) + self.skipWhitespace = True + self.initExprGroups = True + self.saveAsList = True + + def streamline(self): + super(Each, self).streamline() + self.mayReturnEmpty = all((e.mayReturnEmpty for e in self.exprs)) + return self + + def parseImpl(self, instring, loc, doActions=True): + if self.initExprGroups: + self.opt1map = dict(((id(e.expr), e) for e in self.exprs if isinstance(e, Optional))) + opt1 = [e.expr for e in self.exprs if isinstance(e, Optional)] + opt2 = [e for e in self.exprs if e.mayReturnEmpty if not isinstance(e, (Optional, Regex))] + self.optionals = opt1 + opt2 + self.multioptionals = [e.expr for e in self.exprs if isinstance(e, ZeroOrMore)] + self.multirequired = [e.expr for e in self.exprs if isinstance(e, OneOrMore)] + self.required = [e for e in self.exprs if not isinstance(e, (Optional, ZeroOrMore, OneOrMore))] + self.required += self.multirequired + self.initExprGroups = False + tmpLoc = loc + tmpReqd = self.required[None[:None]] + tmpOpt = self.optionals[None[:None]] + matchOrder = [] + keepMatching = True + while keepMatching: + tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired + failed = [] + for e in tmpExprs: + try: + tmpLoc = e.tryParse(instring, tmpLoc) + except ParseException: + failed.append(e) + else: + matchOrder.append(self.opt1map.get(id(e), e)) + if e in tmpReqd: + tmpReqd.remove(e) + if e in tmpOpt: + tmpOpt.remove(e) + + if len(failed) == len(tmpExprs): + keepMatching = False + + if tmpReqd: + missing = ", ".join((_ustr(e) for e in tmpReqd)) + raise ParseException(instring, loc, "Missing one or more required elements (%s)" % missing) + matchOrder += [e for e in self.exprs if isinstance(e, Optional) if e.expr 
in tmpOpt] + resultlist = [] + for e in matchOrder: + loc, results = e._parse(instring, loc, doActions) + resultlist.append(results) + + finalResults = sum(resultlist, ParseResults([])) + return (loc, finalResults) + + def __str__(self): + if hasattr(self, "name"): + return self.name + if self.strRepr is None: + self.strRepr = "{" + " & ".join((_ustr(e) for e in self.exprs)) + "}" + return self.strRepr + + def checkRecursion(self, parseElementList): + subRecCheckList = parseElementList[None[:None]] + [self] + for e in self.exprs: + e.checkRecursion(subRecCheckList) + + +class ParseElementEnhance(ParserElement): + __doc__ = "Abstract subclass of :class:`ParserElement`, for combining and\n post-processing parsed tokens.\n " + + def __init__(self, expr, savelist=False): + super(ParseElementEnhance, self).__init__(savelist) + if isinstance(expr, basestring): + if issubclass(self._literalStringClass, Token): + expr = self._literalStringClass(expr) + else: + expr = self._literalStringClass(Literal(expr)) + self.expr = expr + self.strRepr = None + if expr is not None: + self.mayIndexError = expr.mayIndexError + self.mayReturnEmpty = expr.mayReturnEmpty + self.setWhitespaceChars(expr.whiteChars) + self.skipWhitespace = expr.skipWhitespace + self.saveAsList = expr.saveAsList + self.callPreparse = expr.callPreparse + self.ignoreExprs.extend(expr.ignoreExprs) + + def parseImpl(self, instring, loc, doActions=True): + if self.expr is not None: + return self.expr._parse(instring, loc, doActions, callPreParse=False) + raise ParseException("", loc, self.errmsg, self) + + def leaveWhitespace(self): + self.skipWhitespace = False + self.expr = self.expr.copy() + if self.expr is not None: + self.expr.leaveWhitespace() + return self + + def ignore(self, other): + if isinstance(other, Suppress): + if other not in self.ignoreExprs: + super(ParseElementEnhance, self).ignore(other) + if self.expr is not None: + self.expr.ignore(self.ignoreExprs[-1]) + else: + super(ParseElementEnhance, 
self).ignore(other) + if self.expr is not None: + self.expr.ignore(self.ignoreExprs[-1]) + return self + + def streamline(self): + super(ParseElementEnhance, self).streamline() + if self.expr is not None: + self.expr.streamline() + return self + + def checkRecursion(self, parseElementList): + if self in parseElementList: + raise RecursiveGrammarException(parseElementList + [self]) + subRecCheckList = parseElementList[None[:None]] + [self] + if self.expr is not None: + self.expr.checkRecursion(subRecCheckList) + + def validate(self, validateTrace=None): + if validateTrace is None: + validateTrace = [] + tmp = validateTrace[None[:None]] + [self] + if self.expr is not None: + self.expr.validate(tmp) + self.checkRecursion([]) + + def __str__(self): + try: + return super(ParseElementEnhance, self).__str__() + except Exception: + pass + + if self.strRepr is None: + if self.expr is not None: + self.strRepr = "%s:(%s)" % (self.__class__.__name__, _ustr(self.expr)) + return self.strRepr + + +class FollowedBy(ParseElementEnhance): + __doc__ = 'Lookahead matching of the given parse expression.\n ``FollowedBy`` does *not* advance the parsing position within\n the input string, it only verifies that the specified parse\n expression matches at the current position. ``FollowedBy``\n always returns a null token list. 
If any results names are defined\n in the lookahead expression, those *will* be returned for access by\n name.\n\n Example::\n\n # use FollowedBy to match a label only if it is followed by a \':\'\n data_word = Word(alphas)\n label = data_word + FollowedBy(\':\')\n attr_expr = Group(label + Suppress(\':\') + OneOrMore(data_word, stopOn=label).setParseAction(\' \'.join))\n\n OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint()\n\n prints::\n\n [[\'shape\', \'SQUARE\'], [\'color\', \'BLACK\'], [\'posn\', \'upper left\']]\n ' + + def __init__(self, expr): + super(FollowedBy, self).__init__(expr) + self.mayReturnEmpty = True + + def parseImpl(self, instring, loc, doActions=True): + _, ret = self.expr._parse(instring, loc, doActions=doActions) + del ret[None[:None]] + return ( + loc, ret) + + +class PrecededBy(ParseElementEnhance): + __doc__ = 'Lookbehind matching of the given parse expression.\n ``PrecededBy`` does not advance the parsing position within the\n input string, it only verifies that the specified parse expression\n matches prior to the current position. ``PrecededBy`` always\n returns a null token list, but if a results name is defined on the\n given expression, it is returned.\n\n Parameters:\n\n - expr - expression that must match prior to the current parse\n location\n - retreat - (default= ``None``) - (int) maximum number of characters\n to lookbehind prior to the current parse location\n\n If the lookbehind expression is a string, Literal, Keyword, or\n a Word or CharsNotIn with a specified exact or maximum length, then\n the retreat parameter is not required. 
Otherwise, retreat must be\n specified to give a maximum number of characters to look back from\n the current parse position for a lookbehind match.\n\n Example::\n\n # VB-style variable names with type prefixes\n int_var = PrecededBy("#") + pyparsing_common.identifier\n str_var = PrecededBy("$") + pyparsing_common.identifier\n\n ' + + def __init__(self, expr, retreat=None): + super(PrecededBy, self).__init__(expr) + self.expr = self.expr().leaveWhitespace() + self.mayReturnEmpty = True + self.mayIndexError = False + self.exact = False + if isinstance(expr, str): + retreat = len(expr) + self.exact = True + else: + if isinstance(expr, (Literal, Keyword)): + retreat = expr.matchLen + self.exact = True + else: + if isinstance(expr, (Word, CharsNotIn)) and expr.maxLen != _MAX_INT: + retreat = expr.maxLen + self.exact = True + else: + if isinstance(expr, _PositionToken): + retreat = 0 + self.exact = True + self.retreat = retreat + self.errmsg = "not preceded by " + str(expr) + self.skipWhitespace = False + self.parseAction.append(lambda s, l, t: t.__delitem__(slice(None, None))) + + def parseImpl(self, instring, loc=0, doActions=True): + if self.exact: + if loc < self.retreat: + raise ParseException(instring, loc, self.errmsg) + start = loc - self.retreat + _, ret = self.expr._parse(instring, start) + else: + test_expr = self.expr + StringEnd() + instring_slice = instring[max(0, loc - self.retreat)[:loc]] + last_expr = ParseException(instring, loc, self.errmsg) + for offset in range(1, min(loc, self.retreat + 1) + 1): + try: + _, ret = test_expr._parse(instring_slice, len(instring_slice) - offset) + except ParseBaseException as pbe: + try: + last_expr = pbe + finally: + pbe = None + del pbe + + else: + break + else: + raise last_expr + + return ( + loc, ret) + + +class NotAny(ParseElementEnhance): + __doc__ = 'Lookahead to disallow matching with the given parse expression.\n ``NotAny`` does *not* advance the parsing position within the\n input string, it only verifies 
that the specified parse expression\n does *not* match at the current position. Also, ``NotAny`` does\n *not* skip over leading whitespace. ``NotAny`` always returns\n a null token list. May be constructed using the \'~\' operator.\n\n Example::\n\n AND, OR, NOT = map(CaselessKeyword, "AND OR NOT".split())\n\n # take care not to mistake keywords for identifiers\n ident = ~(AND | OR | NOT) + Word(alphas)\n boolean_term = Optional(NOT) + ident\n\n # very crude boolean expression - to support parenthesis groups and\n # operation hierarchy, use infixNotation\n boolean_expr = boolean_term + ZeroOrMore((AND | OR) + boolean_term)\n\n # integers that are followed by "." are actually floats\n integer = Word(nums) + ~Char(".")\n ' + + def __init__(self, expr): + super(NotAny, self).__init__(expr) + self.skipWhitespace = False + self.mayReturnEmpty = True + self.errmsg = "Found unwanted token, " + _ustr(self.expr) + + def parseImpl(self, instring, loc, doActions=True): + if self.expr.canParseNext(instring, loc): + raise ParseException(instring, loc, self.errmsg, self) + return ( + loc, []) + + def __str__(self): + if hasattr(self, "name"): + return self.name + if self.strRepr is None: + self.strRepr = "~{" + _ustr(self.expr) + "}" + return self.strRepr + + +class _MultipleMatch(ParseElementEnhance): + + def __init__(self, expr, stopOn=None): + super(_MultipleMatch, self).__init__(expr) + self.saveAsList = True + ender = stopOn + if isinstance(ender, basestring): + ender = self._literalStringClass(ender) + self.stopOn(ender) + + def stopOn(self, ender): + if isinstance(ender, basestring): + ender = self._literalStringClass(ender) + self.not_ender = ~ender if ender is not None else None + return self + + def parseImpl(self, instring, loc, doActions=True): + self_expr_parse = self.expr._parse + self_skip_ignorables = self._skipIgnorables + check_ender = self.not_ender is not None + if check_ender: + try_not_ender = self.not_ender.tryParse + if check_ender: + 
try_not_ender(instring, loc) + loc, tokens = self_expr_parse(instring, loc, doActions, callPreParse=False) + try: + hasIgnoreExprs = not not self.ignoreExprs + while 1: + if check_ender: + try_not_ender(instring, loc) + elif hasIgnoreExprs: + preloc = self_skip_ignorables(instring, loc) + else: + preloc = loc + loc, tmptokens = self_expr_parse(instring, preloc, doActions) + if tmptokens or tmptokens.haskeys(): + tokens += tmptokens + + except (ParseException, IndexError): + pass + + return ( + loc, tokens) + + def _setResultsName(self, name, listAllMatches=False): + if __diag__.warn_ungrouped_named_tokens_in_collection: + for e in [self.expr] + getattr(self.expr, "exprs", []): + if isinstance(e, ParserElement) and e.resultsName: + warnings.warn(("{0}: setting results name {1!r} on {2} expression collides with {3!r} on contained expression".format("warn_ungrouped_named_tokens_in_collection", name, type(self).__name__, e.resultsName)), + stacklevel=3) + + return super(_MultipleMatch, self)._setResultsName(name, listAllMatches) + + +class OneOrMore(_MultipleMatch): + __doc__ = 'Repetition of one or more of the given expression.\n\n Parameters:\n - expr - expression that must match one or more times\n - stopOn - (default= ``None``) - expression for a terminating sentinel\n (only required if the sentinel would ordinarily match the repetition\n expression)\n\n Example::\n\n data_word = Word(alphas)\n label = data_word + FollowedBy(\':\')\n attr_expr = Group(label + Suppress(\':\') + OneOrMore(data_word).setParseAction(\' \'.join))\n\n text = "shape: SQUARE posn: upper left color: BLACK"\n OneOrMore(attr_expr).parseString(text).pprint() # Fail! 
read \'color\' as data instead of next label -> [[\'shape\', \'SQUARE color\']]\n\n # use stopOn attribute for OneOrMore to avoid reading label string as part of the data\n attr_expr = Group(label + Suppress(\':\') + OneOrMore(data_word, stopOn=label).setParseAction(\' \'.join))\n OneOrMore(attr_expr).parseString(text).pprint() # Better -> [[\'shape\', \'SQUARE\'], [\'posn\', \'upper left\'], [\'color\', \'BLACK\']]\n\n # could also be written as\n (attr_expr * (1,)).parseString(text).pprint()\n ' + + def __str__(self): + if hasattr(self, "name"): + return self.name + if self.strRepr is None: + self.strRepr = "{" + _ustr(self.expr) + "}..." + return self.strRepr + + +class ZeroOrMore(_MultipleMatch): + __doc__ = "Optional repetition of zero or more of the given expression.\n\n Parameters:\n - expr - expression that must match zero or more times\n - stopOn - (default= ``None``) - expression for a terminating sentinel\n (only required if the sentinel would ordinarily match the repetition\n expression)\n\n Example: similar to :class:`OneOrMore`\n " + + def __init__(self, expr, stopOn=None): + super(ZeroOrMore, self).__init__(expr, stopOn=stopOn) + self.mayReturnEmpty = True + + def parseImpl(self, instring, loc, doActions=True): + try: + return super(ZeroOrMore, self).parseImpl(instring, loc, doActions) + except (ParseException, IndexError): + return ( + loc, []) + + def __str__(self): + if hasattr(self, "name"): + return self.name + if self.strRepr is None: + self.strRepr = "[" + _ustr(self.expr) + "]..." 
+ return self.strRepr + + +class _NullToken(object): + + def __bool__(self): + return False + + __nonzero__ = __bool__ + + def __str__(self): + return "" + + +class Optional(ParseElementEnhance): + __doc__ = "Optional matching of the given expression.\n\n Parameters:\n - expr - expression that must match zero or more times\n - default (optional) - value to be returned if the optional expression is not found.\n\n Example::\n\n # US postal code can be a 5-digit zip, plus optional 4-digit qualifier\n zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4)))\n zip.runTests('''\n # traditional ZIP code\n 12345\n\n # ZIP+4 form\n 12101-0001\n\n # invalid ZIP\n 98765-\n ''')\n\n prints::\n\n # traditional ZIP code\n 12345\n ['12345']\n\n # ZIP+4 form\n 12101-0001\n ['12101-0001']\n\n # invalid ZIP\n 98765-\n ^\n FAIL: Expected end of text (at char 5), (line:1, col:6)\n " + _Optional__optionalNotMatched = _NullToken() + + def __init__(self, expr, default=_Optional__optionalNotMatched): + super(Optional, self).__init__(expr, savelist=False) + self.saveAsList = self.expr.saveAsList + self.defaultValue = default + self.mayReturnEmpty = True + + def parseImpl(self, instring, loc, doActions=True): + try: + loc, tokens = self.expr._parse(instring, loc, doActions, callPreParse=False) + except (ParseException, IndexError): + if self.defaultValue is not self._Optional__optionalNotMatched: + if self.expr.resultsName: + tokens = ParseResults([self.defaultValue]) + tokens[self.expr.resultsName] = self.defaultValue + else: + tokens = [ + self.defaultValue] + else: + tokens = [] + + return ( + loc, tokens) + + def __str__(self): + if hasattr(self, "name"): + return self.name + if self.strRepr is None: + self.strRepr = "[" + _ustr(self.expr) + "]" + return self.strRepr + + +class SkipTo(ParseElementEnhance): + __doc__ = 'Token for skipping over all undefined text until the matched\n expression is found.\n\n Parameters:\n - expr - target expression marking the end of the 
data to be skipped\n - include - (default= ``False``) if True, the target expression is also parsed\n (the skipped text and target expression are returned as a 2-element list).\n - ignore - (default= ``None``) used to define grammars (typically quoted strings and\n comments) that might contain false matches to the target expression\n - failOn - (default= ``None``) define expressions that are not allowed to be\n included in the skipped test; if found before the target expression is found,\n the SkipTo is not a match\n\n Example::\n\n report = \'\'\'\n Outstanding Issues Report - 1 Jan 2000\n\n # | Severity | Description | Days Open\n -----+----------+-------------------------------------------+-----------\n 101 | Critical | Intermittent system crash | 6\n 94 | Cosmetic | Spelling error on Login (\'log|n\') | 14\n 79 | Minor | System slow when running too many reports | 47\n \'\'\'\n integer = Word(nums)\n SEP = Suppress(\'|\')\n # use SkipTo to simply match everything up until the next SEP\n # - ignore quoted strings, so that a \'|\' character inside a quoted string does not match\n # - parse action will call token.strip() for each matched token, i.e., the description body\n string_data = SkipTo(SEP, ignore=quotedString)\n string_data.setParseAction(tokenMap(str.strip))\n ticket_expr = (integer("issue_num") + SEP\n + string_data("sev") + SEP\n + string_data("desc") + SEP\n + integer("days_open"))\n\n for tkt in ticket_expr.searchString(report):\n print tkt.dump()\n\n prints::\n\n [\'101\', \'Critical\', \'Intermittent system crash\', \'6\']\n - days_open: 6\n - desc: Intermittent system crash\n - issue_num: 101\n - sev: Critical\n [\'94\', \'Cosmetic\', "Spelling error on Login (\'log|n\')", \'14\']\n - days_open: 14\n - desc: Spelling error on Login (\'log|n\')\n - issue_num: 94\n - sev: Cosmetic\n [\'79\', \'Minor\', \'System slow when running too many reports\', \'47\']\n - days_open: 47\n - desc: System slow when running too many reports\n - issue_num: 79\n - 
sev: Minor\n ' + + def __init__(self, other, include=False, ignore=None, failOn=None): + super(SkipTo, self).__init__(other) + self.ignoreExpr = ignore + self.mayReturnEmpty = True + self.mayIndexError = False + self.includeMatch = include + self.saveAsList = False + if isinstance(failOn, basestring): + self.failOn = self._literalStringClass(failOn) + else: + self.failOn = failOn + self.errmsg = "No match found for " + _ustr(self.expr) + + def parseImpl(self, instring, loc, doActions=True): + startloc = loc + instrlen = len(instring) + expr = self.expr + expr_parse = self.expr._parse + self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None + self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None + tmploc = loc + while 1: + if tmploc <= instrlen: + if self_failOn_canParseNext is not None: + if self_failOn_canParseNext(instring, tmploc): + break + elif self_ignoreExpr_tryParse is not None: + while True: + try: + tmploc = self_ignoreExpr_tryParse(instring, tmploc) + except ParseBaseException: + break + + try: + expr_parse(instring, tmploc, doActions=False, callPreParse=False) + except (ParseException, IndexError): + tmploc += 1 + + break + else: + raise ParseException(instring, loc, self.errmsg, self) + + loc = tmploc + skiptext = instring[startloc[:loc]] + skipresult = ParseResults(skiptext) + if self.includeMatch: + loc, mat = expr_parse(instring, loc, doActions, callPreParse=False) + skipresult += mat + return (loc, skipresult) + + +class Forward(ParseElementEnhance): + __doc__ = "Forward declaration of an expression to be defined later -\n used for recursive grammars, such as algebraic infix notation.\n When the expression is known, it is assigned to the ``Forward``\n variable using the '<<' operator.\n\n Note: take care when assigning to ``Forward`` not to overlook\n precedence of operators.\n\n Specifically, '|' has a lower precedence than '<<', so that::\n\n fwdExpr << a | b | c\n\n will 
actually be evaluated as::\n\n (fwdExpr << a) | b | c\n\n thereby leaving b and c out as parseable alternatives. It is recommended that you\n explicitly group the values inserted into the ``Forward``::\n\n fwdExpr << (a | b | c)\n\n Converting to use the '<<=' operator instead will avoid this problem.\n\n See :class:`ParseResults.pprint` for an example of a recursive\n parser created using ``Forward``.\n " + + def __init__(self, other=None): + super(Forward, self).__init__(other, savelist=False) + + def __lshift__(self, other): + if isinstance(other, basestring): + other = self._literalStringClass(other) + self.expr = other + self.strRepr = None + self.mayIndexError = self.expr.mayIndexError + self.mayReturnEmpty = self.expr.mayReturnEmpty + self.setWhitespaceChars(self.expr.whiteChars) + self.skipWhitespace = self.expr.skipWhitespace + self.saveAsList = self.expr.saveAsList + self.ignoreExprs.extend(self.expr.ignoreExprs) + return self + + def __ilshift__(self, other): + return self << other + + def leaveWhitespace(self): + self.skipWhitespace = False + return self + + def streamline(self): + if not self.streamlined: + self.streamlined = True + if self.expr is not None: + self.expr.streamline() + return self + + def validate(self, validateTrace=None): + if validateTrace is None: + validateTrace = [] + if self not in validateTrace: + tmp = validateTrace[None[:None]] + [self] + if self.expr is not None: + self.expr.validate(tmp) + self.checkRecursion([]) + + def __str__(self): + if hasattr(self, "name"): + return self.name + if self.strRepr is not None: + return self.strRepr + self.strRepr = ": ..." + retString = "..." 
+ try: + if self.expr is not None: + retString = _ustr(self.expr)[None[:1000]] + else: + retString = "None" + finally: + self.strRepr = self.__class__.__name__ + ": " + retString + + return self.strRepr + + def copy(self): + if self.expr is not None: + return super(Forward, self).copy() + ret = Forward() + ret <<= self + return ret + + def _setResultsName(self, name, listAllMatches=False): + if __diag__.warn_name_set_on_empty_Forward: + if self.expr is None: + warnings.warn(("{0}: setting results name {0!r} on {1} expression that has no contained expression".format("warn_name_set_on_empty_Forward", name, type(self).__name__)), + stacklevel=3) + return super(Forward, self)._setResultsName(name, listAllMatches) + + +class TokenConverter(ParseElementEnhance): + __doc__ = "\n Abstract subclass of :class:`ParseExpression`, for converting parsed results.\n " + + def __init__(self, expr, savelist=False): + super(TokenConverter, self).__init__(expr) + self.saveAsList = False + + +class Combine(TokenConverter): + __doc__ = "Converter to concatenate all matching tokens to a single string.\n By default, the matching patterns must also be contiguous in the\n input string; this can be disabled by specifying\n ``'adjacent=False'`` in the constructor.\n\n Example::\n\n real = Word(nums) + '.' + Word(nums)\n print(real.parseString('3.1416')) # -> ['3', '.', '1416']\n # will also erroneously match the following\n print(real.parseString('3. 1416')) # -> ['3', '.', '1416']\n\n real = Combine(Word(nums) + '.' + Word(nums))\n print(real.parseString('3.1416')) # -> ['3.1416']\n # no match when there are internal spaces\n print(real.parseString('3. 
1416')) # -> Exception: Expected W:(0123...)\n " + + def __init__(self, expr, joinString='', adjacent=True): + super(Combine, self).__init__(expr) + if adjacent: + self.leaveWhitespace() + self.adjacent = adjacent + self.skipWhitespace = True + self.joinString = joinString + self.callPreparse = True + + def ignore(self, other): + if self.adjacent: + ParserElement.ignore(self, other) + else: + super(Combine, self).ignore(other) + return self + + def postParse(self, instring, loc, tokenlist): + retToks = tokenlist.copy() + del retToks[None[:None]] + retToks += ParseResults(["".join(tokenlist._asStringList(self.joinString))], modal=(self.modalResults)) + if self.resultsName: + if retToks.haskeys(): + return [ + retToks] + return retToks + + +class Group(TokenConverter): + __doc__ = 'Converter to return the matched tokens as a list - useful for\n returning tokens of :class:`ZeroOrMore` and :class:`OneOrMore` expressions.\n\n Example::\n\n ident = Word(alphas)\n num = Word(nums)\n term = ident | num\n func = ident + Optional(delimitedList(term))\n print(func.parseString("fn a, b, 100")) # -> [\'fn\', \'a\', \'b\', \'100\']\n\n func = ident + Group(Optional(delimitedList(term)))\n print(func.parseString("fn a, b, 100")) # -> [\'fn\', [\'a\', \'b\', \'100\']]\n ' + + def __init__(self, expr): + super(Group, self).__init__(expr) + self.saveAsList = True + + def postParse(self, instring, loc, tokenlist): + return [ + tokenlist] + + +class Dict(TokenConverter): + __doc__ = 'Converter to return a repetitive expression as a list, but also\n as a dictionary. Each element can also be referenced using the first\n token in the expression as its key. 
Useful for tabular report\n scraping when the first column can be used as a item key.\n\n Example::\n\n data_word = Word(alphas)\n label = data_word + FollowedBy(\':\')\n attr_expr = Group(label + Suppress(\':\') + OneOrMore(data_word).setParseAction(\' \'.join))\n\n text = "shape: SQUARE posn: upper left color: light blue texture: burlap"\n attr_expr = (label + Suppress(\':\') + OneOrMore(data_word, stopOn=label).setParseAction(\' \'.join))\n\n # print attributes as plain groups\n print(OneOrMore(attr_expr).parseString(text).dump())\n\n # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names\n result = Dict(OneOrMore(Group(attr_expr))).parseString(text)\n print(result.dump())\n\n # access named fields as dict entries, or output as dict\n print(result[\'shape\'])\n print(result.asDict())\n\n prints::\n\n [\'shape\', \'SQUARE\', \'posn\', \'upper left\', \'color\', \'light blue\', \'texture\', \'burlap\']\n [[\'shape\', \'SQUARE\'], [\'posn\', \'upper left\'], [\'color\', \'light blue\'], [\'texture\', \'burlap\']]\n - color: light blue\n - posn: upper left\n - shape: SQUARE\n - texture: burlap\n SQUARE\n {\'color\': \'light blue\', \'posn\': \'upper left\', \'texture\': \'burlap\', \'shape\': \'SQUARE\'}\n\n See more examples at :class:`ParseResults` of accessing fields by results name.\n ' + + def __init__(self, expr): + super(Dict, self).__init__(expr) + self.saveAsList = True + + def postParse(self, instring, loc, tokenlist): + for i, tok in enumerate(tokenlist): + if len(tok) == 0: + continue + else: + ikey = tok[0] + if isinstance(ikey, int): + ikey = _ustr(tok[0]).strip() + if len(tok) == 1: + tokenlist[ikey] = _ParseResultsWithOffset("", i) + if len(tok) == 2: + if not isinstance(tok[1], ParseResults): + tokenlist[ikey] = _ParseResultsWithOffset(tok[1], i) + dictvalue = tok.copy() + del dictvalue[0] + if not len(dictvalue) != 1: + if isinstance(dictvalue, ParseResults): + if dictvalue.haskeys(): + tokenlist[ikey] 
= _ParseResultsWithOffset(dictvalue, i) + tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0], i) + + if self.resultsName: + return [ + tokenlist] + return tokenlist + + +class Suppress(TokenConverter): + __doc__ = 'Converter for ignoring the results of a parsed expression.\n\n Example::\n\n source = "a, b, c,d"\n wd = Word(alphas)\n wd_list1 = wd + ZeroOrMore(\',\' + wd)\n print(wd_list1.parseString(source))\n\n # often, delimiters that are useful during parsing are just in the\n # way afterward - use Suppress to keep them out of the parsed output\n wd_list2 = wd + ZeroOrMore(Suppress(\',\') + wd)\n print(wd_list2.parseString(source))\n\n prints::\n\n [\'a\', \',\', \'b\', \',\', \'c\', \',\', \'d\']\n [\'a\', \'b\', \'c\', \'d\']\n\n (See also :class:`delimitedList`.)\n ' + + def postParse(self, instring, loc, tokenlist): + return [] + + def suppress(self): + return self + + +class OnlyOnce(object): + __doc__ = "Wrapper for parse actions, to ensure they are only called once.\n " + + def __init__(self, methodCall): + self.callable = _trim_arity(methodCall) + self.called = False + + def __call__(self, s, l, t): + if not self.called: + results = self.callable(s, l, t) + self.called = True + return results + raise ParseException(s, l, "") + + def reset(self): + self.called = False + + +def traceParseAction(f): + """Decorator for debugging parse actions. + + When the parse action is called, this decorator will print + ``">> entering method-name(line:, , )"``. + When the parse action completes, the decorator will print + ``"<<"`` followed by the returned value, or any exception that the parse action raised. 
+ + Example:: + + wd = Word(alphas) + + @traceParseAction + def remove_duplicate_chars(tokens): + return ''.join(sorted(set(''.join(tokens)))) + + wds = OneOrMore(wd).setParseAction(remove_duplicate_chars) + print(wds.parseString("slkdjs sld sldd sdlf sdljf")) + + prints:: + + >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {})) + < 3: + thisFunc = paArgs[0].__class__.__name__ + "." + thisFunc + sys.stderr.write(">>entering %s(line: '%s', %d, %r)\n" % (thisFunc, line(l, s), l, t)) + try: + ret = f(*paArgs) + except Exception as exc: + try: + sys.stderr.write("< ['aa', 'bb', 'cc'] + delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] + """ + dlName = _ustr(expr) + " [" + _ustr(delim) + " " + _ustr(expr) + "]..." + if combine: + return Combine(expr + ZeroOrMore(delim + expr)).setName(dlName) + return (expr + ZeroOrMore(Suppress(delim) + expr)).setName(dlName) + + +def countedArray(expr, intExpr=None): + """Helper to define a counted list of expressions. + + This helper defines a pattern of the form:: + + integer expr expr expr... + + where the leading integer tells how many expr expressions follow. + The matched tokens returns the array of expr tokens as a list - the + leading count token is suppressed. + + If ``intExpr`` is specified, it should be a pyparsing expression + that produces an integer value. 
+ + Example:: + + countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd'] + + # in this parser, the leading integer value is given in binary, + # '10' indicating that 2 values are in the array + binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2)) + countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd'] + """ + arrayExpr = Forward() + + def countFieldParseAction(s, l, t): + n = t[0] + arrayExpr << (n and Group(And([expr] * n)) or Group(empty)) + return [] + + if intExpr is None: + intExpr = Word(nums).setParseAction(lambda t: int(t[0])) + else: + intExpr = intExpr.copy() + intExpr.setName("arrayLen") + intExpr.addParseAction(countFieldParseAction, callDuringTry=True) + return (intExpr + arrayExpr).setName("(len) " + _ustr(expr) + "...") + + +def _flatten(L): + ret = [] + for i in L: + if isinstance(i, list): + ret.extend(_flatten(i)) + else: + ret.append(i) + + return ret + + +def matchPreviousLiteral(expr): + """Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks for + a 'repeat' of a previous expression. For example:: + + first = Word(nums) + second = matchPreviousLiteral(first) + matchExpr = first + ":" + second + + will match ``"1:1"``, but not ``"1:2"``. Because this + matches a previous literal, will also match the leading + ``"1:1"`` in ``"1:10"``. If this is not desired, use + :class:`matchPreviousExpr`. Do *not* use with packrat parsing + enabled. 
+ """ + rep = Forward() + + def copyTokenToRepeater(s, l, t): + if t: + if len(t) == 1: + rep << t[0] + else: + tflat = _flatten(t.asList()) + rep << And((Literal(tt) for tt in tflat)) + else: + rep << Empty() + + expr.addParseAction(copyTokenToRepeater, callDuringTry=True) + rep.setName("(prev) " + _ustr(expr)) + return rep + + +def matchPreviousExpr(expr): + """Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks for + a 'repeat' of a previous expression. For example:: + + first = Word(nums) + second = matchPreviousExpr(first) + matchExpr = first + ":" + second + + will match ``"1:1"``, but not ``"1:2"``. Because this + matches by expressions, will *not* match the leading ``"1:1"`` + in ``"1:10"``; the expressions are evaluated first, and then + compared, so ``"1"`` is compared with ``"10"``. Do *not* use + with packrat parsing enabled. + """ + rep = Forward() + e2 = expr.copy() + rep <<= e2 + + def copyTokenToRepeater(s, l, t): + matchTokens = _flatten(t.asList()) + + def mustMatchTheseTokens(s, l, t): + theseTokens = _flatten(t.asList()) + if theseTokens != matchTokens: + raise ParseException("", 0, "") + + rep.setParseAction(mustMatchTheseTokens, callDuringTry=True) + + expr.addParseAction(copyTokenToRepeater, callDuringTry=True) + rep.setName("(prev) " + _ustr(expr)) + return rep + + +def _escapeRegexRangeChars(s): + for c in "\\^-[]": + s = s.replace(c, _bslash + c) + + s = s.replace("\n", "\\n") + s = s.replace("\t", "\\t") + return _ustr(s) + + +def oneOf(strs, caseless=False, useRegex=True, asKeyword=False): + """Helper to quickly define a set of alternative Literals, and makes + sure to do longest-first testing when there is a conflict, + regardless of the input order, but returns + a :class:`MatchFirst` for best performance. 
+ + Parameters: + + - strs - a string of space-delimited literals, or a collection of + string literals + - caseless - (default= ``False``) - treat all literals as + caseless + - useRegex - (default= ``True``) - as an optimization, will + generate a Regex object; otherwise, will generate + a :class:`MatchFirst` object (if ``caseless=True`` or ``asKeyword=True``, or if + creating a :class:`Regex` raises an exception) + - asKeyword - (default=``False``) - enforce Keyword-style matching on the + generated expressions + + Example:: + + comp_oper = oneOf("< = > <= >= !=") + var = Word(alphas) + number = Word(nums) + term = var | number + comparison_expr = term + comp_oper + term + print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12")) + + prints:: + + [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] + """ + if isinstance(caseless, basestring): + warnings.warn("More than one string argument passed to oneOf, pass choices as a list or space-delimited string", stacklevel=2) + else: + if caseless: + isequal = lambda a, b: a.upper() == b.upper() + masks = lambda a, b: b.upper().startswith(a.upper()) + parseElementClass = CaselessKeyword if asKeyword else CaselessLiteral + else: + isequal = lambda a, b: a == b + masks = lambda a, b: b.startswith(a) + parseElementClass = Keyword if asKeyword else Literal + symbols = [] + if isinstance(strs, basestring): + symbols = strs.split() + else: + if isinstance(strs, Iterable): + symbols = list(strs) + else: + warnings.warn("Invalid argument to oneOf, expected string or iterable", SyntaxWarning, + stacklevel=2) + if not symbols: + return NoMatch() + if not asKeyword: + i = 0 + while i < len(symbols) - 1: + cur = symbols[i] + for j, other in enumerate(symbols[(i + 1)[:None]]): + if isequal(other, cur): + del symbols[i + j + 1] + break + else: + if masks(cur, other): + del symbols[i + j + 1] + symbols.insert(i, other) + break + else: + i += 1 + + if not caseless: + if not asKeyword: + if useRegex: + try: 
+ if len(symbols) == len("".join(symbols)): + return Regex("[%s]" % "".join((_escapeRegexRangeChars(sym) for sym in symbols))).setName(" | ".join(symbols)) + return Regex("|".join((re.escape(sym) for sym in symbols))).setName(" | ".join(symbols)) + except Exception: + warnings.warn("Exception creating Regex for oneOf, building MatchFirst", SyntaxWarning, + stacklevel=2) + + return MatchFirst((parseElementClass(sym) for sym in symbols)).setName(" | ".join(symbols)) + + +def dictOf(key, value): + """Helper to easily and clearly define a dictionary by specifying + the respective patterns for the key and value. Takes care of + defining the :class:`Dict`, :class:`ZeroOrMore`, and + :class:`Group` tokens in the proper order. The key pattern + can include delimiting markers or punctuation, as long as they are + suppressed, thereby leaving the significant key text. The value + pattern can include named results, so that the :class:`Dict` results + can include named token fields. + + Example:: + + text = "shape: SQUARE posn: upper left color: light blue texture: burlap" + attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + print(OneOrMore(attr_expr).parseString(text).dump()) + + attr_label = label + attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join) + + # similar to Dict, but simpler call format + result = dictOf(attr_label, attr_value).parseString(text) + print(result.dump()) + print(result['shape']) + print(result.shape) # object attribute access works too + print(result.asDict()) + + prints:: + + [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] + - color: light blue + - posn: upper left + - shape: SQUARE + - texture: burlap + SQUARE + SQUARE + {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'} + """ + return Dict(OneOrMore(Group(key + value))) + + +def originalTextFor(expr, asString=True): + """Helper to 
return the original, untokenized text for a given + expression. Useful to restore the parsed fields of an HTML start + tag into the raw tag text itself, or to revert separate tokens with + intervening whitespace back to the original matching input text. By + default, returns astring containing the original parsed text. + + If the optional ``asString`` argument is passed as + ``False``, then the return value is + a :class:`ParseResults` containing any results names that + were originally matched, and a single token containing the original + matched text from the input string. So if the expression passed to + :class:`originalTextFor` contains expressions with defined + results names, you must set ``asString`` to ``False`` if you + want to preserve those results name values. + + Example:: + + src = "this is test bold text normal text " + for tag in ("b", "i"): + opener, closer = makeHTMLTags(tag) + patt = originalTextFor(opener + SkipTo(closer) + closer) + print(patt.searchString(src)[0]) + + prints:: + + [' bold text '] + ['text'] + """ + locMarker = Empty().setParseAction(lambda s, loc, t: loc) + endlocMarker = locMarker.copy() + endlocMarker.callPreparse = False + matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end") + if asString: + extractText = lambda s, l, t: s[t._original_start[:t._original_end]] + else: + + def extractText(s, l, t): + t[None[:None]] = [ + s[t.pop("_original_start")[:t.pop("_original_end")]]] + + matchExpr.setParseAction(extractText) + matchExpr.ignoreExprs = expr.ignoreExprs + return matchExpr + + +def ungroup(expr): + """Helper to undo pyparsing's default grouping of And expressions, + even if all but one are non-empty. + """ + return TokenConverter(expr).addParseAction(lambda t: t[0]) + + +def locatedExpr(expr): + """Helper to decorate a returned token with its starting and ending + locations in the input string. 
+ + This helper adds the following results names: + + - locn_start = location where matched expression begins + - locn_end = location where matched expression ends + - value = the actual parsed results + + Be careful if the input text contains ```` characters, you + may want to call :class:`ParserElement.parseWithTabs` + + Example:: + + wd = Word(alphas) + for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"): + print(match) + + prints:: + + [[0, 'ljsdf', 5]] + [[8, 'lksdjjf', 15]] + [[18, 'lkkjj', 23]] + """ + locator = Empty().setParseAction(lambda s, l, t: l) + return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end")) + + +empty = Empty().setName("empty") +lineStart = LineStart().setName("lineStart") +lineEnd = LineEnd().setName("lineEnd") +stringStart = StringStart().setName("stringStart") +stringEnd = StringEnd().setName("stringEnd") +_escapedPunc = Word(_bslash, "\\[]-*.$+^?()~ ", exact=2).setParseAction(lambda s, l, t: t[0][1]) +_escapedHexChar = Regex("\\\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s, l, t: unichr(int(t[0].lstrip("\\0x"), 16))) +_escapedOctChar = Regex("\\\\0[0-7]+").setParseAction(lambda s, l, t: unichr(int(t[0][1[:None]], 8))) +_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn("\\]", exact=1) +_charRange = Group(_singleChar + Suppress("-") + _singleChar) +_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group(OneOrMore(_charRange | _singleChar)).setResultsName("body") + "]" + +def srange(s): + r"""Helper to easily define string ranges for use in Word + construction. Borrows syntax from regexp '[]' string range + definitions:: + + srange("[0-9]") -> "0123456789" + srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz" + srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_" + + The input string must be enclosed in []'s, and the returned string + is the expanded character set joined into a single string. 
The + values enclosed in the []'s may be: + + - a single character + - an escaped character with a leading backslash (such as ``\-`` + or ``\]``) + - an escaped hex character with a leading ``'\x'`` + (``\x21``, which is a ``'!'`` character) (``\0x##`` + is also supported for backwards compatibility) + - an escaped octal character with a leading ``'\0'`` + (``\041``, which is a ``'!'`` character) + - a range of any of the above, separated by a dash (``'a-z'``, + etc.) + - any combination of the above (``'aeiouy'``, + ``'a-zA-Z0-9_$'``, etc.) + """ + _expanded = lambda p: if not isinstance(p, ParseResults): +p # Avoid dead code: "".join((unichr(c) for c in range(ord(p[0]), ord(p[1]) + 1))) + try: + return "".join((_expanded(part) for part in _reBracketExpr.parseString(s).body)) + except Exception: + return "" + + +def matchOnlyAtCol(n): + """Helper method for defining parse actions that require matching at + a specific column in the input text. + """ + + def verifyCol(strg, locn, toks): + if col(locn, strg) != n: + raise ParseException(strg, locn, "matched token not at column %d" % n) + + return verifyCol + + +def replaceWith(replStr): + """Helper method for common parse actions that simply return + a literal value. Especially useful when used with + :class:`transformString` (). + + Example:: + + num = Word(nums).setParseAction(lambda toks: int(toks[0])) + na = oneOf("N/A NA").setParseAction(replaceWith(math.nan)) + term = na | num + + OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234] + """ + return (lambda s, l, t: [ + replStr]) + + +def removeQuotes(s, l, t): + """Helper parse action for removing quotation marks from parsed + quoted strings. 
+ + Example:: + + # by default, quotation marks are included in parsed results + quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"] + + # use removeQuotes to strip quotation marks from parsed results + quotedString.setParseAction(removeQuotes) + quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"] + """ + return t[0][1[:-1]] + + +def tokenMap(func, *args): + """Helper to define a parse action by mapping a function to all + elements of a ParseResults list. If any additional args are passed, + they are forwarded to the given function as additional arguments + after the token, as in + ``hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))``, + which will convert the parsed data to an integer using base 16. + + Example (compare the last to example in :class:`ParserElement.transformString`:: + + hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16)) + hex_ints.runTests(''' + 00 11 22 aa FF 0a 0d 1a + ''') + + upperword = Word(alphas).setParseAction(tokenMap(str.upper)) + OneOrMore(upperword).runTests(''' + my kingdom for a horse + ''') + + wd = Word(alphas).setParseAction(tokenMap(str.title)) + OneOrMore(wd).setParseAction(' '.join).runTests(''' + now is the winter of our discontent made glorious summer by this sun of york + ''') + + prints:: + + 00 11 22 aa FF 0a 0d 1a + [0, 17, 34, 170, 255, 10, 13, 26] + + my kingdom for a horse + ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE'] + + now is the winter of our discontent made glorious summer by this sun of york + ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York'] + """ + + def pa(s, l, t): + return [func(tokn, *args) for tokn in t] + + try: + func_name = getattr(func, "__name__", getattr(func, "__class__").__name__) + except Exception: + func_name = str(func) + + pa.__name__ = func_name + return pa + + +upcaseTokens = tokenMap((lambda t: _ustr(t).upper())) 
# Parse action to lowercase all matched tokens.
downcaseTokens = tokenMap(lambda t: _ustr(t).lower())


def _makeTags(tagStr, xml, suppress_LT=Suppress("<"), suppress_GT=Suppress(">")):
    """Internal helper to construct opening and closing tag expressions,
    given a tag name. Returns an (openTag, closeTag) pair."""
    if isinstance(tagStr, basestring):
        resname = tagStr
        # HTML tags match caselessly; XML tags are exact keywords.
        tagStr = Keyword(tagStr, caseless=not xml)
    else:
        resname = tagStr.name

    tagAttrName = Word(alphas, alphanums + "_-:")
    if xml:
        # XML attributes must be double-quoted.
        tagAttrValue = dblQuotedString.copy().setParseAction(removeQuotes)
        openTag = (suppress_LT
                   + tagStr("tag")
                   + Dict(ZeroOrMore(Group(tagAttrName + Suppress("=") + tagAttrValue)))
                   + Optional("/", default=[False])("empty").setParseAction(lambda s, l, t: t[0] == "/")
                   + suppress_GT)
    else:
        # HTML attributes may be quoted or bare, and names are lowercased.
        tagAttrValue = quotedString.copy().setParseAction(removeQuotes) | Word(printables, excludeChars=">")
        openTag = (suppress_LT
                   + tagStr("tag")
                   + Dict(ZeroOrMore(Group(tagAttrName.setParseAction(downcaseTokens)
                                           + Optional(Suppress("=") + tagAttrValue))))
                   + Optional("/", default=[False])("empty").setParseAction(lambda s, l, t: t[0] == "/")
                   + suppress_GT)
    closeTag = Combine(_L("</") + tagStr + ">", adjacent=False)

    openTag.setName("<%s>" % resname)
    # Expose the parsed start tag under a predictable results name, e.g. "startDiv".
    openTag.addParseAction(lambda t: t.__setitem__(
        "start" + "".join(resname.replace(":", " ").title().split()), t.copy()))
    closeTag = closeTag("end" + "".join(resname.replace(":", " ").title().split())).setName("</%s>" % resname)
    openTag.tag = resname
    closeTag.tag = resname
    openTag.tag_body = SkipTo(closeTag())
    return openTag, closeTag


def makeHTMLTags(tagStr):
    """Helper to construct opening and closing tag expressions for HTML,
    given a tag name. Matches tags in either upper or lower case,
    attributes with namespaces and with quoted or unquoted values.

    Example::

        # makeHTMLTags returns pyparsing expressions for the opening and
        # closing tags as a 2-tuple
        a, a_end = makeHTMLTags("A")
        link_expr = a + SkipTo(a_end)("link_text") + a_end

        for link in link_expr.searchString(text):
            # attributes in the tag (like "href" shown here) are
            # also accessible as named results
            print(link.link_text, '->', link.href)
    """
    return _makeTags(tagStr, False)


def makeXMLTags(tagStr):
    """Helper to construct opening and closing tag expressions for XML,
    given a tag name. Matches tags only in the given upper/lower case.

    Example: similar to :class:`makeHTMLTags`
    """
    return _makeTags(tagStr, True)


def withAttribute(*args, **attrDict):
    """Helper to create a validating parse action to be used with start
    tags created with :class:`makeXMLTags` or :class:`makeHTMLTags`.
    Use ``withAttribute`` to qualify a starting tag with a required
    attribute value, to avoid false matches on common tags.

    Call ``withAttribute`` with a series of attribute names and values:

    - keyword arguments, as in ``(align="right")``, or
    - as an explicit dict with ``**`` operator, when an attribute
      name is also a Python reserved word, as in
      ``**{"class": "Customer", "align": "right"}``
    - a list of name-value tuples, as in
      ``(("ns1:class", "Customer"), ("ns2:align", "right"))``

    For attribute names with a namespace prefix, you must use the second
    form. Attribute names are matched insensitive to upper/lower case.

    If just testing for ``class`` (with or without a namespace), use
    :class:`withClass`.

    To verify that the attribute exists, but without specifying a value,
    pass ``withAttribute.ANY_VALUE`` as the value.
    """
    if args:
        attrs = args[:]
    else:
        attrs = attrDict.items()
    attrs = [(k, v) for k, v in attrs]

    def pa(s, l, tokens):
        for attrName, attrValue in attrs:
            if attrName not in tokens:
                raise ParseException(s, l, "no matching attribute " + attrName)
            if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue:
                raise ParseException(s, l, "attribute '%s' has value '%s', must be '%s'"
                                     % (attrName, tokens[attrName], attrValue))

    return pa


# Sentinel meaning "attribute must exist, any value accepted".
withAttribute.ANY_VALUE = object()


def withClass(classname, namespace=''):
    """Simplified version of :class:`withAttribute` when
    matching on a div class - made difficult because ``class`` is
    a reserved word in Python.

    Example::

        div, div_end = makeHTMLTags("div")
        div_grid = div().setParseAction(withClass("grid"))
    """
    classattr = "%s:class" % namespace if namespace else "class"
    return withAttribute(**{classattr: classname})


# Operator-associativity constants for infixNotation.
opAssoc = SimpleNamespace()
opAssoc.LEFT = object()
opAssoc.RIGHT = object()


def infixNotation(baseExpr, opList, lpar=Suppress("("), rpar=Suppress(")")):
    """Helper method for constructing grammars of expressions made up of
    operators working in a precedence hierarchy. Operators may be unary
    or binary, left- or right-associative. Parse actions can also be
    attached to operator expressions. The generated parser will also
    recognize the use of parentheses to override operator precedences.

    Note: if you define a deep operator list, you may see performance
    issues when using infixNotation. See
    :class:`ParserElement.enablePackrat` for a mechanism to potentially
    improve your parser performance.

    Parameters:
    - baseExpr - expression representing the most basic element for the
      nested
    - opList - list of tuples, one for each operator precedence level
      in the expression grammar; each tuple is of the form ``(opExpr,
      numTerms, rightLeftAssoc, parseAction)``, where:

      - opExpr is the pyparsing expression for the operator; may also
        be a string, which will be converted to a Literal; if numTerms
        is 3, opExpr is a tuple of two expressions, for the two
        operators separating the 3 terms
      - numTerms is the number of terms for this operator (must be 1,
        2, or 3)
      - rightLeftAssoc is the indicator whether the operator is right
        or left associative, using the pyparsing-defined constants
        ``opAssoc.RIGHT`` and ``opAssoc.LEFT``.
      - parseAction is the parse action to be associated with
        expressions matching this operator expression (the parse action
        tuple member may be omitted); if the parse action is passed
        a tuple or list of functions, this is equivalent to calling
        ``setParseAction(*fn)``
    - lpar - expression for matching left-parentheses
      (default= ``Suppress('(')``)
    - rpar - expression for matching right-parentheses
      (default= ``Suppress(')')``)

    Example::

        integer = pyparsing_common.signed_integer
        varname = pyparsing_common.identifier

        arith_expr = infixNotation(integer | varname,
            [
            ('-', 1, opAssoc.RIGHT),
            (oneOf('* /'), 2, opAssoc.LEFT),
            (oneOf('+ -'), 2, opAssoc.LEFT),
            ])
    """

    class _FB(FollowedBy):
        # Cheaper lookahead: only *try* the expression, never run its
        # parse actions, since the match is re-parsed by the real term.

        def parseImpl(self, instring, loc, doActions=True):
            self.expr.tryParse(instring, loc)
            return loc, []

    ret = Forward()
    lastExpr = baseExpr | (lpar + ret + rpar)
    for i, operDef in enumerate(opList):
        # Pad to 4 elements so the optional parse action defaults to None.
        opExpr, arity, rightLeftAssoc, pa = (operDef + (None,))[:4]
        termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr
        if arity == 3:
            if opExpr is None or len(opExpr) != 2:
                raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions")
            opExpr1, opExpr2 = opExpr
        thisExpr = Forward().setName(termName)
        if rightLeftAssoc == opAssoc.LEFT:
            if arity == 1:
                matchExpr = _FB(lastExpr + opExpr) + Group(lastExpr + OneOrMore(opExpr))
            elif arity == 2:
                if opExpr is not None:
                    matchExpr = _FB(lastExpr + opExpr + lastExpr) + Group(lastExpr + OneOrMore(opExpr + lastExpr))
                else:
                    matchExpr = _FB(lastExpr + lastExpr) + Group(lastExpr + OneOrMore(lastExpr))
            elif arity == 3:
                matchExpr = (_FB(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr)
                             + Group(lastExpr + OneOrMore(opExpr1 + lastExpr + opExpr2 + lastExpr)))
            else:
                raise ValueError("operator must be unary (1), binary (2), or ternary (3)")
        elif rightLeftAssoc == opAssoc.RIGHT:
            if arity == 1:
                # try to avoid LR with this extra test
                if not isinstance(opExpr, Optional):
                    opExpr = Optional(opExpr)
                matchExpr = _FB(opExpr.expr + thisExpr) + Group(opExpr + thisExpr)
            elif arity == 2:
                if opExpr is not None:
                    matchExpr = _FB(lastExpr + opExpr + thisExpr) + Group(lastExpr + OneOrMore(opExpr + thisExpr))
                else:
                    matchExpr = _FB(lastExpr + thisExpr) + Group(lastExpr + OneOrMore(thisExpr))
            elif arity == 3:
                matchExpr = (_FB(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr)
                             + Group(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr))
            else:
                raise ValueError("operator must be unary (1), binary (2), or ternary (3)")
        else:
            raise ValueError("operator must indicate right or left associativity")
        if pa:
            if isinstance(pa, (tuple, list)):
                matchExpr.setParseAction(*pa)
            else:
                matchExpr.setParseAction(pa)
        thisExpr <<= matchExpr.setName(termName) | lastExpr
        lastExpr = thisExpr

    ret <<= lastExpr
    return ret
+operatorPrecedence = infixNotation +dblQuotedString = Combine(Regex('"(?:[^"\\n\\r\\\\]|(?:"")|(?:\\\\(?:[^x]|x[0-9a-fA-F]+)))*') + '"').setName("string enclosed in double quotes") +sglQuotedString = Combine(Regex("'(?:[^'\\n\\r\\\\]|(?:'')|(?:\\\\(?:[^x]|x[0-9a-fA-F]+)))*") + "'").setName("string enclosed in single quotes") +quotedString = Combine(Regex('"(?:[^"\\n\\r\\\\]|(?:"")|(?:\\\\(?:[^x]|x[0-9a-fA-F]+)))*') + '"' | Regex("'(?:[^'\\n\\r\\\\]|(?:'')|(?:\\\\(?:[^x]|x[0-9a-fA-F]+)))*") + "'").setName("quotedString using single or double quotes") +unicodeString = Combine(_L("u") + quotedString.copy()).setName("unicode string literal") + +def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): + """Helper method for defining nested lists enclosed in opening and + closing delimiters ("(" and ")" are the default). + + Parameters: + - opener - opening character for a nested list + (default= ``"("``); can also be a pyparsing expression + - closer - closing character for a nested list + (default= ``")"``); can also be a pyparsing expression + - content - expression for items within the nested lists + (default= ``None``) + - ignoreExpr - expression for ignoring opening and closing + delimiters (default= :class:`quotedString`) + + If an expression is not provided for the content argument, the + nested expression will capture all whitespace-delimited content + between delimiters as a list of separate values. + + Use the ``ignoreExpr`` argument to define expressions that may + contain opening or closing characters that should not be treated as + opening or closing characters for nesting, such as quotedString or + a comment expression. Specify multiple expressions using an + :class:`Or` or :class:`MatchFirst`. The default is + :class:`quotedString`, but if no expressions are to be ignored, then + pass ``None`` for this argument. 
+ + Example:: + + data_type = oneOf("void int short long char float double") + decl_data_type = Combine(data_type + Optional(Word('*'))) + ident = Word(alphas+'_', alphanums+'_') + number = pyparsing_common.number + arg = Group(decl_data_type + ident) + LPAR, RPAR = map(Suppress, "()") + + code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) + + c_function = (decl_data_type("type") + + ident("name") + + LPAR + Optional(delimitedList(arg), [])("args") + RPAR + + code_body("body")) + c_function.ignore(cStyleComment) + + source_code = ''' + int is_odd(int x) { + return (x%2); + } + + int dec_to_hex(char hchar) { + if (hchar >= '0' && hchar <= '9') { + return (ord(hchar)-ord('0')); + } else { + return (10+ord(hchar)-ord('A')); + } + } + ''' + for func in c_function.searchString(source_code): + print("%(name)s (%(type)s) args: %(args)s" % func) + + prints:: + + is_odd (int) args: [['int', 'x']] + dec_to_hex (int) args: [['char', 'hchar']] + """ + if opener == closer: + raise ValueError("opening and closing strings cannot be the same") + elif content is None: + if isinstance(opener, basestring) and isinstance(closer, basestring): + if len(opener) == 1: + if len(closer) == 1: + if ignoreExpr is not None: + content = Combine(OneOrMore(~ignoreExpr + CharsNotIn((opener + closer + ParserElement.DEFAULT_WHITE_CHARS), + exact=1))).setParseAction(lambda t: t[0].strip()) + else: + content = empty.copy() + CharsNotIn(opener + closer + ParserElement.DEFAULT_WHITE_CHARS).setParseAction(lambda t: t[0].strip()) + else: + if ignoreExpr is not None: + content = Combine(OneOrMore(~ignoreExpr + ~Literal(opener) + ~Literal(closer) + CharsNotIn((ParserElement.DEFAULT_WHITE_CHARS), exact=1))).setParseAction(lambda t: t[0].strip()) + else: + content = Combine(OneOrMore(~Literal(opener) + ~Literal(closer) + CharsNotIn((ParserElement.DEFAULT_WHITE_CHARS), exact=1))).setParseAction(lambda t: t[0].strip()) + else: + raise ValueError("opening and closing arguments must be 
strings if no content expression is given") + ret = Forward() + if ignoreExpr is not None: + ret <<= Group(Suppress(opener) + ZeroOrMore(ignoreExpr | ret | content) + Suppress(closer)) + else: + ret <<= Group(Suppress(opener) + ZeroOrMore(ret | content) + Suppress(closer)) + ret.setName("nested %s%s expression" % (opener, closer)) + return ret + + +def indentedBlock(blockStatementExpr, indentStack, indent=True): + """Helper method for defining space-delimited indentation blocks, + such as those used to define block statements in Python source code. + + Parameters: + + - blockStatementExpr - expression defining syntax of statement that + is repeated within the indented block + - indentStack - list created by caller to manage indentation stack + (multiple statementWithIndentedBlock expressions within a single + grammar should share a common indentStack) + - indent - boolean indicating whether block must be indented beyond + the current level; set to False for block of left-most + statements (default= ``True``) + + A valid block must contain at least one ``blockStatement``. 
+ + Example:: + + data = ''' + def A(z): + A1 + B = 100 + G = A2 + A2 + A3 + B + def BB(a,b,c): + BB1 + def BBA(): + bba1 + bba2 + bba3 + C + D + def spam(x,y): + def eggs(z): + pass + ''' + + indentStack = [1] + stmt = Forward() + + identifier = Word(alphas, alphanums) + funcDecl = ("def" + identifier + Group("(" + Optional(delimitedList(identifier)) + ")") + ":") + func_body = indentedBlock(stmt, indentStack) + funcDef = Group(funcDecl + func_body) + + rvalue = Forward() + funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") + rvalue << (funcCall | identifier | Word(nums)) + assignment = Group(identifier + "=" + rvalue) + stmt << (funcDef | assignment | identifier) + + module_body = OneOrMore(stmt) + + parseTree = module_body.parseString(data) + parseTree.pprint() + + prints:: + + [['def', + 'A', + ['(', 'z', ')'], + ':', + [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], + 'B', + ['def', + 'BB', + ['(', 'a', 'b', 'c', ')'], + ':', + [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], + 'C', + 'D', + ['def', + 'spam', + ['(', 'x', 'y', ')'], + ':', + [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] + """ + backup_stack = indentStack[None[:None]] + + def reset_stack(): + indentStack[None[:None]] = backup_stack + + def checkPeerIndent(s, l, t): + if l >= len(s): + return + curCol = col(l, s) + if curCol != indentStack[-1]: + if curCol > indentStack[-1]: + raise ParseException(s, l, "illegal nesting") + raise ParseException(s, l, "not a peer entry") + + def checkSubIndent(s, l, t): + curCol = col(l, s) + if curCol > indentStack[-1]: + indentStack.append(curCol) + else: + raise ParseException(s, l, "not a subentry") + + def checkUnindent(s, l, t): + if l >= len(s): + return + curCol = col(l, s) + if not (indentStack and curCol in indentStack): + raise ParseException(s, l, "not an unindent") + if curCol < indentStack[-1]: + indentStack.pop() + + NL = OneOrMore((LineEnd().setWhitespaceChars("\t 
").suppress()), stopOn=(StringEnd())) + INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName("INDENT") + PEER = Empty().setParseAction(checkPeerIndent).setName("") + UNDENT = Empty().setParseAction(checkUnindent).setName("UNINDENT") + if indent: + smExpr = Group(Optional(NL) + INDENT + OneOrMore((PEER + Group(blockStatementExpr) + Optional(NL)), stopOn=(StringEnd())) + UNDENT) + else: + smExpr = Group(Optional(NL) + OneOrMore((PEER + Group(blockStatementExpr) + Optional(NL)), stopOn=(StringEnd())) + UNDENT) + smExpr.setFailAction(lambda a, b, c, d: reset_stack()) + blockStatementExpr.ignore(_bslash + LineEnd()) + return smExpr.setName("indented block") + + +alphas8bit = srange("[\\0xc0-\\0xd6\\0xd8-\\0xf6\\0xf8-\\0xff]") +punc8bit = srange("[\\0xa1-\\0xbf\\0xd7\\0xf7]") +anyOpenTag, anyCloseTag = makeHTMLTags(Word(alphas, alphanums + "_:").setName("any tag")) +_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(), '><& "\'')) +commonHTMLEntity = Regex("&(?P" + "|".join(_htmlEntityMap.keys()) + ");").setName("common HTML entity") + +def replaceHTMLEntity(t): + """Helper parser action to replace common HTML entities with their special characters""" + return _htmlEntityMap.get(t.entity) + + +cStyleComment = Combine(Regex("/\\*(?:[^*]|\\*(?!/))*") + "*/").setName("C style comment") +htmlComment = Regex("").setName("HTML comment") +restOfLine = Regex(".*").leaveWhitespace().setName("rest of line") +dblSlashComment = Regex("//(?:\\\\\\n|[^\\n])*").setName("// comment") +cppStyleComment = Combine(Regex("/\\*(?:[^*]|\\*(?!/))*") + "*/" | dblSlashComment).setName("C++ style comment") +javaStyleComment = cppStyleComment +pythonStyleComment = Regex("#.*").setName("Python style comment") +_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=",") + Optional(Word(" \t") + ~Literal(",") + ~LineEnd()))).streamline().setName("commaItem") +commaSeparatedList = delimitedList(Optional((quotedString.copy() | _commasepitem), 
default="")).setName("commaSeparatedList") + +class pyparsing_common: + __doc__ = "Here are some common low-level expressions that may be useful in\n jump-starting parser development:\n\n - numeric forms (:class:`integers`, :class:`reals`,\n :class:`scientific notation`)\n - common :class:`programming identifiers`\n - network addresses (:class:`MAC`,\n :class:`IPv4`, :class:`IPv6`)\n - ISO8601 :class:`dates` and\n :class:`datetime`\n - :class:`UUID`\n - :class:`comma-separated list`\n\n Parse actions:\n\n - :class:`convertToInteger`\n - :class:`convertToFloat`\n - :class:`convertToDate`\n - :class:`convertToDatetime`\n - :class:`stripHTMLTags`\n - :class:`upcaseTokens`\n - :class:`downcaseTokens`\n\n Example::\n\n pyparsing_common.number.runTests('''\n # any int or real number, returned as the appropriate type\n 100\n -100\n +100\n 3.14159\n 6.02e23\n 1e-12\n ''')\n\n pyparsing_common.fnumber.runTests('''\n # any int or real number, returned as float\n 100\n -100\n +100\n 3.14159\n 6.02e23\n 1e-12\n ''')\n\n pyparsing_common.hex_integer.runTests('''\n # hex numbers\n 100\n FF\n ''')\n\n pyparsing_common.fraction.runTests('''\n # fractions\n 1/2\n -3/4\n ''')\n\n pyparsing_common.mixed_integer.runTests('''\n # mixed fractions\n 1\n 1/2\n -3/4\n 1-3/4\n ''')\n\n import uuid\n pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID))\n pyparsing_common.uuid.runTests('''\n # uuid\n 12345678-1234-5678-1234-567812345678\n ''')\n\n prints::\n\n # any int or real number, returned as the appropriate type\n 100\n [100]\n\n -100\n [-100]\n\n +100\n [100]\n\n 3.14159\n [3.14159]\n\n 6.02e23\n [6.02e+23]\n\n 1e-12\n [1e-12]\n\n # any int or real number, returned as float\n 100\n [100.0]\n\n -100\n [-100.0]\n\n +100\n [100.0]\n\n 3.14159\n [3.14159]\n\n 6.02e23\n [6.02e+23]\n\n 1e-12\n [1e-12]\n\n # hex numbers\n 100\n [256]\n\n FF\n [255]\n\n # fractions\n 1/2\n [0.5]\n\n -3/4\n [-0.75]\n\n # mixed fractions\n 1\n [1]\n\n 1/2\n [0.5]\n\n -3/4\n [-0.75]\n\n 1-3/4\n [1.75]\n\n # 
uuid\n 12345678-1234-5678-1234-567812345678\n [UUID('12345678-1234-5678-1234-567812345678')]\n " + convertToInteger = tokenMap(int) + convertToFloat = tokenMap(float) + integer = Word(nums).setName("integer").setParseAction(convertToInteger) + hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int, 16)) + signed_integer = Regex("[+-]?\\d+").setName("signed integer").setParseAction(convertToInteger) + fraction = (signed_integer().setParseAction(convertToFloat) + "/" + signed_integer().setParseAction(convertToFloat)).setName("fraction") + fraction.addParseAction(lambda t: t[0] / t[-1]) + mixed_integer = (fraction | signed_integer + Optional(Optional("-").suppress() + fraction)).setName("fraction or mixed integer-fraction") + mixed_integer.addParseAction(sum) + real = Regex("[+-]?(?:\\d+\\.\\d*|\\.\\d+)").setName("real number").setParseAction(convertToFloat) + sci_real = Regex("[+-]?(?:\\d+(?:[eE][+-]?\\d+)|(?:\\d+\\.\\d*|\\.\\d+)(?:[eE][+-]?\\d+)?)").setName("real number with scientific notation").setParseAction(convertToFloat) + number = (sci_real | real | signed_integer).streamline() + fnumber = Regex("[+-]?\\d+\\.?\\d*([eE][+-]?\\d+)?").setName("fnumber").setParseAction(convertToFloat) + identifier = Word(alphas + "_", alphanums + "_").setName("identifier") + ipv4_address = Regex("(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}").setName("IPv4 address") + _ipv6_part = Regex("[0-9a-fA-F]{1,4}").setName("hex_integer") + _full_ipv6_address = (_ipv6_part + (":" + _ipv6_part) * 7).setName("full IPv6 address") + _short_ipv6_address = (Optional(_ipv6_part + (":" + _ipv6_part) * (0, 6)) + "::" + Optional(_ipv6_part + (":" + _ipv6_part) * (0, + 6))).setName("short IPv6 address") + _short_ipv6_address.addCondition(lambda t: sum((1 for tt in t if pyparsing_common._ipv6_part.matches(tt))) < 8) + _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address") + ipv6_address = Combine((_full_ipv6_address | 
_mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") + mac_address = Regex("[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\\1[0-9a-fA-F]{2}){4}").setName("MAC address") + + @staticmethod + def convertToDate(fmt='%Y-%m-%d'): + """ + Helper to create a parse action for converting parsed date string to Python datetime.date + + Params - + - fmt - format to be passed to datetime.strptime (default= ``"%Y-%m-%d"``) + + Example:: + + date_expr = pyparsing_common.iso8601_date.copy() + date_expr.setParseAction(pyparsing_common.convertToDate()) + print(date_expr.parseString("1999-12-31")) + + prints:: + + [datetime.date(1999, 12, 31)] + """ + + def cvt_fn(s, l, t): + try: + return datetime.strptime(t[0], fmt).date() + except ValueError as ve: + try: + raise ParseException(s, l, str(ve)) + finally: + ve = None + del ve + + return cvt_fn + + @staticmethod + def convertToDatetime(fmt='%Y-%m-%dT%H:%M:%S.%f'): + """Helper to create a parse action for converting parsed + datetime string to Python datetime.datetime + + Params - + - fmt - format to be passed to datetime.strptime (default= ``"%Y-%m-%dT%H:%M:%S.%f"``) + + Example:: + + dt_expr = pyparsing_common.iso8601_datetime.copy() + dt_expr.setParseAction(pyparsing_common.convertToDatetime()) + print(dt_expr.parseString("1999-12-31T23:59:59.999")) + + prints:: + + [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] + """ + + def cvt_fn(s, l, t): + try: + return datetime.strptime(t[0], fmt) + except ValueError as ve: + try: + raise ParseException(s, l, str(ve)) + finally: + ve = None + del ve + + return cvt_fn + + iso8601_date = Regex("(?P\\d{4})(?:-(?P\\d\\d)(?:-(?P\\d\\d))?)?").setName("ISO8601 date") + iso8601_datetime = Regex("(?P\\d{4})-(?P\\d\\d)-(?P\\d\\d)[T ](?P\\d\\d):(?P\\d\\d)(:(?P\\d\\d(\\.\\d*)?)?)?(?PZ|[+-]\\d\\d:?\\d\\d)?").setName("ISO8601 datetime") + uuid = Regex("[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}").setName("UUID") + _html_stripper = anyOpenTag.suppress() | 
anyCloseTag.suppress() + + @staticmethod + def stripHTMLTags(s, l, tokens): + """Parse action to remove HTML tags from web page HTML source + + Example:: + + # strip HTML links from normal text + text = 'More info at the
pyparsing wiki page' + td, td_end = makeHTMLTags("TD") + table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end + print(table_text.parseString(text).body) + + Prints:: + + More info at the pyparsing wiki page + """ + return pyparsing_common._html_stripper.transformString(tokens[0]) + + _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=",") + Optional(White(" \t")))).streamline().setName("commaItem") + comma_separated_list = delimitedList(Optional((quotedString.copy() | _commasepitem), + default="")).setName("comma separated list") + upcaseTokens = staticmethod(tokenMap((lambda t: _ustr(t).upper()))) + downcaseTokens = staticmethod(tokenMap((lambda t: _ustr(t).lower()))) + + +class _lazyclassproperty(object): + + def __init__(self, fn): + self.fn = fn + self.__doc__ = fn.__doc__ + self.__name__ = fn.__name__ + + def __get__(self, obj, cls): + if cls is None: + cls = type(obj) + if not hasattr(cls, "_intern") or any((cls._intern is getattr(superclass, "_intern", []) for superclass in cls.__mro__[1[:None]])): + cls._intern = {} + attrname = self.fn.__name__ + if attrname not in cls._intern: + cls._intern[attrname] = self.fn(cls) + return cls._intern[attrname] + + +class unicode_set(object): + __doc__ = "\n A set of Unicode characters, for language-specific strings for\n ``alphas``, ``nums``, ``alphanums``, and ``printables``.\n A unicode_set is defined by a list of ranges in the Unicode character\n set, in a class attribute ``_ranges``, such as::\n\n _ranges = [(0x0020, 0x007e), (0x00a0, 0x00ff),]\n\n A unicode set can also be defined using multiple inheritance of other unicode sets::\n\n class CJK(Chinese, Japanese, Korean):\n pass\n " + _ranges = [] + + @classmethod + def _get_chars_for_ranges(cls): + ret = [] + for cc in cls.__mro__: + if cc is unicode_set: + break + for rr in cc._ranges: + ret.extend(range(rr[0], rr[-1] + 1)) + + return [unichr(c) for c in sorted(set(ret))] + + 
@_lazyclassproperty + def printables(cls): + """all non-whitespace characters in this range""" + return "".join(filterfalse(unicode.isspace, cls._get_chars_for_ranges())) + + @_lazyclassproperty + def alphas(cls): + """all alphabetic characters in this range""" + return "".join(filter(unicode.isalpha, cls._get_chars_for_ranges())) + + @_lazyclassproperty + def nums(cls): + """all numeric digit characters in this range""" + return "".join(filter(unicode.isdigit, cls._get_chars_for_ranges())) + + @_lazyclassproperty + def alphanums(cls): + """all alphanumeric characters in this range""" + return cls.alphas + cls.nums + + +class pyparsing_unicode(unicode_set): + __doc__ = "\n A namespace class for defining common language unicode_sets.\n " + _ranges = [(32, sys.maxunicode)] + + class Latin1(unicode_set): + __doc__ = "Unicode set for Latin-1 Unicode Character Range" + _ranges = [(32, 126), (160, 255)] + + class LatinA(unicode_set): + __doc__ = "Unicode set for Latin-A Unicode Character Range" + _ranges = [(256, 383)] + + class LatinB(unicode_set): + __doc__ = "Unicode set for Latin-B Unicode Character Range" + _ranges = [(384, 591)] + + class Greek(unicode_set): + __doc__ = "Unicode set for Greek Unicode Character Ranges" + _ranges = [ + (880, 1023), (7936, 7957), (7960, 7965), (7968, 8005), + (8008, 8013), + (8016, 8023), (8025,), (8027,), + (8029,), (8031, 8061), (8064, 8116), (8118, 8132), + (8134, 8147), + (8150, 8155), (8157, 8175), (8178, 8180), (8182, 8190)] + + class Cyrillic(unicode_set): + __doc__ = "Unicode set for Cyrillic Unicode Character Range" + _ranges = [(1024, 1279)] + + class Chinese(unicode_set): + __doc__ = "Unicode set for Chinese Unicode Character Range" + _ranges = [(19968, 40959), (12288, 12351)] + + class Japanese(unicode_set): + __doc__ = "Unicode set for Japanese Unicode Character Range, combining Kanji, Hiragana, and Katakana ranges" + _ranges = [] + + class Kanji(unicode_set): + __doc__ = "Unicode set for Kanji Unicode Character Range" + 
_ranges = [(19968, 40895), (12288, 12351)] + + class Hiragana(unicode_set): + __doc__ = "Unicode set for Hiragana Unicode Character Range" + _ranges = [(12352, 12447)] + + class Katakana(unicode_set): + __doc__ = "Unicode set for Katakana Unicode Character Range" + _ranges = [(12448, 12543)] + + class Korean(unicode_set): + __doc__ = "Unicode set for Korean Unicode Character Range" + _ranges = [(44032, 55215), (4352, 4607), (12592, 12687), (43360, 43391), (55216, 55295), + (12288, 12351)] + + class CJK(Chinese, Japanese, Korean): + __doc__ = "Unicode set for combined Chinese, Japanese, and Korean (CJK) Unicode Character Range" + + class Thai(unicode_set): + __doc__ = "Unicode set for Thai Unicode Character Range" + _ranges = [(3585, 3642), (3647, 3675)] + + class Arabic(unicode_set): + __doc__ = "Unicode set for Arabic Unicode Character Range" + _ranges = [(1536, 1563), (1566, 1791), (1792, 1919)] + + class Hebrew(unicode_set): + __doc__ = "Unicode set for Hebrew Unicode Character Range" + _ranges = [(1424, 1535)] + + class Devanagari(unicode_set): + __doc__ = "Unicode set for Devanagari Unicode Character Range" + _ranges = [(2304, 2431), (43232, 43263)] + + +pyparsing_unicode.Japanese._ranges = pyparsing_unicode.Japanese.Kanji._ranges + pyparsing_unicode.Japanese.Hiragana._ranges + pyparsing_unicode.Japanese.Katakana._ranges +if PY_3: + setattr(pyparsing_unicode, "العربية", pyparsing_unicode.Arabic) + setattr(pyparsing_unicode, "中文", pyparsing_unicode.Chinese) + setattr(pyparsing_unicode, "кириллица", pyparsing_unicode.Cyrillic) + setattr(pyparsing_unicode, "Ελληνικά", pyparsing_unicode.Greek) + setattr(pyparsing_unicode, "עִברִית", pyparsing_unicode.Hebrew) + setattr(pyparsing_unicode, "日本語", pyparsing_unicode.Japanese) + setattr(pyparsing_unicode.Japanese, "漢字", pyparsing_unicode.Japanese.Kanji) + setattr(pyparsing_unicode.Japanese, "カタカナ", pyparsing_unicode.Japanese.Katakana) + setattr(pyparsing_unicode.Japanese, "ひらがな", pyparsing_unicode.Japanese.Hiragana) + 
class reset_pyparsing_context:
    """
    Context manager to be used when writing unit tests that modify pyparsing
    config values:
    - packrat parsing
    - default whitespace characters
    - default keyword characters
    - literal string auto-conversion class
    - __diag__ settings

    The global state is snapshotted on entry (``save``) and put back on exit
    (``restore``).
    """

    def __init__(self):
        self._save_context = {}

    def save(self):
        # Snapshot every piece of global pyparsing state this manager may
        # disturb; keys mirror the attribute they came from.
        self._save_context["default_whitespace"] = ParserElement.DEFAULT_WHITE_CHARS
        self._save_context["default_keyword_chars"] = Keyword.DEFAULT_KEYWORD_CHARS
        self._save_context["literal_string_class"] = ParserElement._literalStringClass
        self._save_context["packrat_enabled"] = ParserElement._packratEnabled
        self._save_context["packrat_parse"] = ParserElement._parse
        self._save_context["__diag__"] = {
            name: getattr(__diag__, name) for name in __diag__._all_names
        }
        self._save_context["__compat__"] = {
            "collect_all_And_tokens": __compat__.collect_all_And_tokens
        }
        return self

    def restore(self):
        # Reset pyparsing global state from the snapshot taken by save().
        if ParserElement.DEFAULT_WHITE_CHARS != self._save_context["default_whitespace"]:
            ParserElement.setDefaultWhitespaceChars(self._save_context["default_whitespace"])
        Keyword.DEFAULT_KEYWORD_CHARS = self._save_context["default_keyword_chars"]
        ParserElement.inlineLiteralsUsing(self._save_context["literal_string_class"])
        for name, value in self._save_context["__diag__"].items():
            setattr(__diag__, name, value)
        ParserElement._packratEnabled = self._save_context["packrat_enabled"]
        ParserElement._parse = self._save_context["packrat_parse"]
        # BUG FIX: save() stores a dict keyed by flag name; the original code
        # assigned that dict itself, leaving collect_all_And_tokens set to a
        # dict (always truthy) instead of the saved boolean.
        __compat__.collect_all_And_tokens = self._save_context["__compat__"]["collect_all_And_tokens"]

    def __enter__(self):
        return self.save()

    def __exit__(self, *args):
        return self.restore()
+ """ + result = expr.parseString(test_string, parseAll=True) + if verbose: + print(result.dump()) + self.assertParseResultsEquals(result, expected_dict=expected_dict, msg=msg) + + def assertRunTestResults(self, run_tests_report, expected_parse_results=None, msg=None): + """ + Unit test assertion to evaluate output of ParserElement.runTests(). If a list of + list-dict tuples is given as the expected_parse_results argument, then these are zipped + with the report tuples returned by runTests and evaluated using assertParseResultsEquals. + Finally, asserts that the overall runTests() success value is True. + + :param run_tests_report: tuple(bool, [tuple(str, ParseResults or Exception)]) returned from runTests + :param expected_parse_results (optional): [tuple(str, list, dict, Exception)] + """ + run_test_success, run_test_results = run_tests_report + if expected_parse_results is not None: + merged = [(rpt[0], rpt[1], expected) for rpt, expected in zip(run_test_results, expected_parse_results)] + for test_string, result, expected in merged: + fail_msg = next((exp for exp in expected if isinstance(exp, str)), None) + expected_exception = next((exp for exp in expected if isinstance(exp, type) if issubclass(exp, Exception)), None) + if expected_exception is not None: + with self.assertRaises(expected_exception=expected_exception, + msg=(fail_msg or msg)): + if isinstance(result, Exception): + raise result + else: + expected_list = next((exp for exp in expected if isinstance(exp, list)), None) + expected_dict = next((exp for exp in expected if isinstance(exp, dict)), None) + if ( + expected_list, expected_dict) != (None, None): + self.assertParseResultsEquals(result, + expected_list=expected_list, + expected_dict=expected_dict, + msg=(fail_msg or msg)) + else: + print("no validation for {!r}".format(test_string)) + + self.assertTrue(run_test_success, + msg=(msg if msg is not None else "failed runTests")) + + @contextmanager + def assertRaisesParseException(self, 
exc_type=ParseException, msg=None): + with self.assertRaises(exc_type, msg=msg): + yield + + +if __name__ == "__main__": + selectToken = CaselessLiteral("select") + fromToken = CaselessLiteral("from") + ident = Word(alphas, alphanums + "_$") + columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) + columnNameList = Group(delimitedList(columnName)).setName("columns") + columnSpec = "*" | columnNameList + tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) + tableNameList = Group(delimitedList(tableName)).setName("tables") + simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables") + simpleSQL.runTests('\n # \'*\' as column list and dotted table name\n select * from SYS.XYZZY\n\n # caseless match on "SELECT", and casts back to "select"\n SELECT * from XYZZY, ABC\n\n # list of column names, and mixed case SELECT keyword\n Select AA,BB,CC from Sys.dual\n\n # multiple tables\n Select A, B, C from Sys.dual, Table2\n\n # invalid SELECT keyword - should fail\n Xelect A, B, C from Sys.dual\n\n # incomplete command - should fail\n Select\n\n # invalid column name - should fail\n Select ^^^ frox Sys.dual\n\n ') + pyparsing_common.number.runTests("\n 100\n -100\n +100\n 3.14159\n 6.02e23\n 1e-12\n ") + pyparsing_common.fnumber.runTests("\n 100\n -100\n +100\n 3.14159\n 6.02e23\n 1e-12\n ") + pyparsing_common.hex_integer.runTests("\n 100\n FF\n ") + import uuid + pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) + pyparsing_common.uuid.runTests("\n 12345678-1234-5678-1234-567812345678\n ") diff --git a/APPS_UNCOMPILED/lib/pyrsistent/__init__.py b/APPS_UNCOMPILED/lib/pyrsistent/__init__.py new file mode 100644 index 0000000..0f0de1f --- /dev/null +++ b/APPS_UNCOMPILED/lib/pyrsistent/__init__.py @@ -0,0 +1,27 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# 
Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pyrsistent/__init__.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 1479 bytes +from pyrsistent._pmap import pmap, m, PMap +from pyrsistent._pvector import pvector, v, PVector +from pyrsistent._pset import pset, s, PSet +from pyrsistent._pbag import pbag, b, PBag +from pyrsistent._plist import plist, l, PList +from pyrsistent._pdeque import pdeque, dq, PDeque +from pyrsistent._checked_types import CheckedPMap, CheckedPVector, CheckedPSet, InvariantException, CheckedKeyTypeError, CheckedValueTypeError, CheckedType, optional +from pyrsistent._field_common import field, PTypeError, pset_field, pmap_field, pvector_field +from pyrsistent._precord import PRecord +from pyrsistent._pclass import PClass, PClassMeta +from pyrsistent._immutable import immutable +from pyrsistent._helpers import freeze, thaw, mutant +from pyrsistent._transformations import inc, discard, rex, ny +from pyrsistent._toolz import get_in +__all__ = ('pmap', 'm', 'PMap', 'pvector', 'v', 'PVector', 'pset', 's', 'PSet', 'pbag', + 'b', 'PBag', 'plist', 'l', 'PList', 'pdeque', 'dq', 'PDeque', 'CheckedPMap', + 'CheckedPVector', 'CheckedPSet', 'InvariantException', 'CheckedKeyTypeError', + 'CheckedValueTypeError', 'CheckedType', 'optional', 'PRecord', 'field', + 'pset_field', 'pmap_field', 'pvector_field', 'PClass', 'PClassMeta', 'immutable', + 'freeze', 'thaw', 'mutant', 'get_in', 'inc', 'discard', 'rex', 'ny') diff --git a/APPS_UNCOMPILED/lib/pyrsistent/_checked_types.py b/APPS_UNCOMPILED/lib/pyrsistent/_checked_types.py new file mode 100644 index 0000000..dfe7027 --- /dev/null +++ b/APPS_UNCOMPILED/lib/pyrsistent/_checked_types.py @@ -0,0 +1,439 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: 
def maybe_parse_user_type(t):
    """Try to coerce a user-supplied type directive into a list of types.

    This function should be used in all places where a user specifies a type,
    for consistency.

    The policy for what defines valid user input should be clear from the implementation.
    """
    # Preserved iterable types (e.g. Enum subclasses) are kept whole even
    # though they are iterable.
    if isinstance(t, type) and issubclass(t, _preserved_iterable_types):
        return [t]
    # A dotted type name as a string, resolved lazily elsewhere.
    if isinstance(t, string_types):
        return [t]
    # A plain, non-iterable type.
    if isinstance(t, type) and not isinstance(t, Iterable):
        return [t]
    # A collection of directives: flatten each element recursively.
    if isinstance(t, Iterable):
        return tuple(elem for spec in t for elem in maybe_parse_user_type(spec))
    raise TypeError("Type specifications must be types or strings. Input: {}".format(t))
+ """ + if seen is None: + seen = set() + for cls in bases: + if cls in seen: + continue + seen.add(cls) + yield cls.__dict__ + for b in _all_dicts(cls.__bases__, seen): + yield b + + +def store_invariants(dct, bases, destination_name, source_name): + invariants = [] + for ns in [dct] + list(_all_dicts(bases)): + try: + invariant = ns[source_name] + except KeyError: + continue + + invariants.append(invariant) + + if not all((callable(invariant) for invariant in invariants)): + raise TypeError("Invariants must be callable") + dct[destination_name] = tuple((wrap_invariant(inv) for inv in invariants)) + + +class _CheckedTypeMeta(type): + + def __new__(mcs, name, bases, dct): + _store_types(dct, bases, "_checked_types", "__type__") + store_invariants(dct, bases, "_checked_invariants", "__invariant__") + + def default_serializer(self, _, value): + if isinstance(value, CheckedType): + return value.serialize() + return value + + dct.setdefault("__serializer__", default_serializer) + dct["__slots__"] = () + return super(_CheckedTypeMeta, mcs).__new__(mcs, name, bases, dct) + + +class CheckedTypeError(TypeError): + + def __init__(self, source_class, expected_types, actual_type, actual_value, *args, **kwargs): + (super(CheckedTypeError, self).__init__)(*args, **kwargs) + self.source_class = source_class + self.expected_types = expected_types + self.actual_type = actual_type + self.actual_value = actual_value + + +class CheckedKeyTypeError(CheckedTypeError): + __doc__ = "\n Raised when trying to set a value using a key with a type that doesn't match the declared type.\n\n Attributes:\n source_class -- The class of the collection\n expected_types -- Allowed types\n actual_type -- The non matching type\n actual_value -- Value of the variable with the non matching type\n " + + +class CheckedValueTypeError(CheckedTypeError): + __doc__ = "\n Raised when trying to set a value using a key with a type that doesn't match the declared type.\n\n Attributes:\n source_class -- The class 
def optional(*typs):
    """ Convenience function to specify that a value may be of any of the types in type 'typs' or None """
    # NoneType is always appended so the field accepts a missing value.
    return (*typs, type(None))
CheckedPVector(PythonPVector, CheckedType): + __doc__ = "\n A CheckedPVector is a PVector which allows specifying type and invariant checks.\n\n >>> class Positives(CheckedPVector):\n ... __type__ = (int, float)\n ... __invariant__ = lambda n: (n >= 0, 'Negative')\n ...\n >>> Positives([1, 2, 3])\n Positives([1, 2, 3])\n " + __slots__ = () + + def __new__(cls, initial=()): + if type(initial) == PythonPVector: + return super(CheckedPVector, cls).__new__(cls, initial._count, initial._shift, initial._root, initial._tail) + return CheckedPVector.Evolver(cls, python_pvector()).extend(initial).persistent() + + def set(self, key, value): + return self.evolver().set(key, value).persistent() + + def append(self, val): + return self.evolver().append(val).persistent() + + def extend(self, it): + return self.evolver().extend(it).persistent() + + create = classmethod(_checked_type_create) + + def serialize(self, format=None): + serializer = self.__serializer__ + return list((serializer(format, v) for v in self)) + + def __reduce__(self): + return ( + _restore_pickle, (self.__class__, list(self))) + + class Evolver(PythonPVector.Evolver): + __slots__ = ('_destination_class', '_invariant_errors') + + def __init__(self, destination_class, vector): + super(CheckedPVector.Evolver, self).__init__(vector) + self._destination_class = destination_class + self._invariant_errors = [] + + def _check(self, it): + _check_types(it, self._destination_class._checked_types, self._destination_class) + error_data = _invariant_errors_iterable(it, self._destination_class._checked_invariants) + self._invariant_errors.extend(error_data) + + def __setitem__(self, key, value): + self._check([value]) + return super(CheckedPVector.Evolver, self).__setitem__(key, value) + + def append(self, elem): + self._check([elem]) + return super(CheckedPVector.Evolver, self).append(elem) + + def extend(self, it): + it = list(it) + self._check(it) + return super(CheckedPVector.Evolver, self).extend(it) + + def 
@six.add_metaclass(_CheckedTypeMeta)
class CheckedPSet(PSet, CheckedType):
    """
    A CheckedPSet is a PSet which allows specifying type and invariant checks.

    >>> class Positives(CheckedPSet):
    ...     __type__ = (int, float)
    ...     __invariant__ = lambda n: (n >= 0, 'Negative')
    ...
    >>> Positives([1, 2, 3])
    Positives([1, 2, 3])
    """
    __slots__ = ()

    def __new__(cls, initial=()):
        if type(initial) is PMap:
            # Internal fast path: build directly on an existing backing PMap
            # (no per-element checking; used by trusted internal callers).
            return super(CheckedPSet, cls).__new__(cls, initial)
        evolver = CheckedPSet.Evolver(cls, pset())
        for e in initial:
            evolver.add(e)
        return evolver.persistent()

    def __repr__(self):
        # PSet.__repr__ yields "pset([...])"; drop the leading "pset" and
        # prepend this class's name.  BUG FIX: the decompiled slice
        # ``[4[:None]]`` subscripts the int 4 and raises TypeError at
        # runtime; the intended slice is ``[4:]``.
        return self.__class__.__name__ + super(CheckedPSet, self).__repr__()[4:]

    def __str__(self):
        return self.__repr__()

    def serialize(self, format=None):
        serializer = self.__serializer__
        return set(serializer(format, v) for v in self)

    create = classmethod(_checked_type_create)

    def __reduce__(self):
        return (
            _restore_pickle, (self.__class__, list(self)))

    def evolver(self):
        return CheckedPSet.Evolver(self.__class__, self)

    class Evolver(PSet._Evolver):
        __slots__ = ('_destination_class', '_invariant_errors')

        def __init__(self, destination_class, original_set):
            super(CheckedPSet.Evolver, self).__init__(original_set)
            self._destination_class = destination_class
            self._invariant_errors = []

        def _check(self, it):
            # Type-check every element, then accumulate (don't raise yet)
            # invariant violations; they surface in persistent().
            _check_types(it, self._destination_class._checked_types, self._destination_class)
            error_data = _invariant_errors_iterable(it, self._destination_class._checked_invariants)
            self._invariant_errors.extend(error_data)

        def add(self, element):
            self._check([element])
            self._pmap_evolver[element] = True
            return self

        def persistent(self):
            if self._invariant_errors:
                raise InvariantException(error_codes=self._invariant_errors)
            if self.is_dirty() or self._destination_class != type(self._original_pset):
                return self._destination_class(self._pmap_evolver.persistent())
            return self._original_pset
__invariant__ = lambda k, v: (int(v) == k, 'Invalid mapping')\n ...\n >>> IntToFloatMap({1: 1.5, 2: 2.25})\n IntToFloatMap({1: 1.5, 2: 2.25})\n " + __slots__ = () + + def __new__(cls, initial={}, size=_UNDEFINED_CHECKED_PMAP_SIZE): + if size is not _UNDEFINED_CHECKED_PMAP_SIZE: + return super(CheckedPMap, cls).__new__(cls, size, initial) + evolver = CheckedPMap.Evolver(cls, pmap()) + for k, v in initial.items(): + evolver.set(k, v) + + return evolver.persistent() + + def evolver(self): + return CheckedPMap.Evolver(self.__class__, self) + + def __repr__(self): + return self.__class__.__name__ + "({0})".format(str(dict(self))) + + __str__ = __repr__ + + def serialize(self, format=None): + serializer = self.__serializer__ + return dict((serializer(format, k, v) for k, v in self.items())) + + @classmethod + def create(cls, source_data, _factory_fields=None): + if isinstance(source_data, cls): + return source_data + key_types = get_types(cls._checked_key_types) + checked_key_type = next((t for t in key_types if issubclass(t, CheckedType)), None) + value_types = get_types(cls._checked_value_types) + checked_value_type = next((t for t in value_types if issubclass(t, CheckedType)), None) + if checked_key_type or checked_value_type: + return cls(dict(((checked_key_type.create(key) if checked_key_type and not any((isinstance(key, t) for t in key_types)) else key, + checked_value_type.create(value) if checked_value_type and not any((isinstance(value, t) for t in value_types)) else value) for key, value in source_data.items()))) + return cls(source_data) + + def __reduce__(self): + return ( + _restore_pickle, (self.__class__, dict(self))) + + class Evolver(PMap._Evolver): + __slots__ = ('_destination_class', '_invariant_errors') + + def __init__(self, destination_class, original_map): + super(CheckedPMap.Evolver, self).__init__(original_map) + self._destination_class = destination_class + self._invariant_errors = [] + + def set(self, key, value): + _check_types([key], 
# Compatibility shims for pyrsistent across Python versions.
from six import string_types

try:
    from enum import Enum
except ImportError:
    # BUG FIX: was a bare ``except:``, which also swallows SystemExit and
    # KeyboardInterrupt; only a missing enum module should trigger the shim.
    # Fallback marker class so issubclass(x, Enum) checks elsewhere still work.
    class Enum(object):
        pass


# collections.abc hosts the ABCs on Python 3.3+; the plain ``collections``
# fallback covers older interpreters.
try:
    from collections.abc import Container, Hashable, Iterable, Mapping, Sequence, Set, Sized
except ImportError:
    from collections import Container, Hashable, Iterable, Mapping, Sequence, Set, Sized
def is_field_ignore_extra_complaint(type_cls, field, ignore_extra):
    """Return True when ``ignore_extra`` should be propagated to this
    field's factory, i.e. the field is of the given checked-collection kind
    and its factory accepts an ``ignore_extra`` argument.

    ignore_extra defaults to False, so for speed the negative case returns
    immediately without inspecting the factory.
    """
    if not ignore_extra:
        return False
    if not is_type_cls(type_cls, field.type):
        return False
    # BUG FIX: the decompiled version returned unconditionally here
    # (``return is_type_cls(...) or False``), making the factory-signature
    # inspection below unreachable; restore the guard-clause control flow.
    if PY2:
        return "ignore_extra" in inspect.getargspec(field.factory).args
    return "ignore_extra" in inspect.signature(field.factory).parameters
+ :param serializer: function that returns a serialized version of the field + """ + if isinstance(type, (list, set, tuple)): + types = set(maybe_parse_many_user_types(type)) + else: + types = set(maybe_parse_user_type(type)) + invariant_function = wrap_invariant(invariant) if (invariant != PFIELD_NO_INVARIANT and callable(invariant)) else invariant + field = _PField(type=types, invariant=invariant_function, initial=initial, mandatory=mandatory, + factory=factory, + serializer=serializer) + _check_field_parameters(field) + return field + + +def _check_field_parameters(field): + for t in field.type: + if not isinstance(t, type) or isinstance(t, six.string_types): + raise TypeError("Type parameter expected, not {0}".format(type(t))) + + if field.initial is not PFIELD_NO_INITIAL: + if not callable(field.initial): + if field.type: + if not any((isinstance(field.initial, t) for t in field.type)): + raise TypeError("Initial has invalid type {0}".format(type(field.initial))) + if not callable(field.invariant): + raise TypeError("Invariant must be callable") + if not callable(field.factory): + raise TypeError("Factory must be callable") + if not callable(field.serializer): + raise TypeError("Serializer must be callable") + + +class PTypeError(TypeError): + __doc__ = "\n Raised when trying to assign a value with a type that doesn't match the declared type.\n\n Attributes:\n source_class -- The class of the record\n field -- Field name\n expected_types -- Types allowed for the field\n actual_type -- The non matching type\n " + + def __init__(self, source_class, field, expected_types, actual_type, *args, **kwargs): + (super(PTypeError, self).__init__)(*args, **kwargs) + self.source_class = source_class + self.field = field + self.expected_types = expected_types + self.actual_type = actual_type + + +SEQ_FIELD_TYPE_SUFFIXES = {CheckedPVector: "PVector", + CheckedPSet: "PSet"} +_seq_field_types = {} + +def _restore_seq_field_pickle(checked_class, item_type, data): + 
"""Unpickling function for auto-generated PVec/PSet field types.""" + type_ = _seq_field_types[(checked_class, item_type)] + return _restore_pickle(type_, data) + + +def _types_to_names(types): + """Convert a tuple of types to a human-readable string.""" + return "".join((get_type(typ).__name__.capitalize() for typ in types)) + + +def _make_seq_field_type(checked_class, item_type): + """Create a subclass of the given checked class with the given item type.""" + type_ = _seq_field_types.get((checked_class, item_type)) + if type_ is not None: + return type_ + + class TheType(checked_class): + __type__ = item_type + + def __reduce__(self): + return ( + _restore_seq_field_pickle, + ( + checked_class, item_type, list(self))) + + suffix = SEQ_FIELD_TYPE_SUFFIXES[checked_class] + TheType.__name__ = _types_to_names(TheType._checked_types) + suffix + _seq_field_types[(checked_class, item_type)] = TheType + return TheType + + +def _sequence_field(checked_class, item_type, optional, initial): + """ + Create checked field for either ``PSet`` or ``PVector``. + + :param checked_class: ``CheckedPSet`` or ``CheckedPVector``. + :param item_type: The required type for the items in the set. + :param optional: If true, ``None`` can be used as a value for + this field. + :param initial: Initial value to pass to factory. + + :return: A ``field`` containing a checked class. + """ + TheType = _make_seq_field_type(checked_class, item_type) + if optional: + + def factory(argument, _factory_fields=None, ignore_extra=False): + if argument is None: + return + return TheType.create(argument, _factory_fields=_factory_fields, ignore_extra=ignore_extra) + + else: + factory = TheType.create + return field(type=(optional_type(TheType) if optional else TheType), factory=factory, + mandatory=True, + initial=(factory(initial))) + + +def pset_field(item_type, optional=False, initial=()): + """ + Create checked ``PSet`` field. + + :param item_type: The required type for the items in the set. 
+ :param optional: If true, ``None`` can be used as a value for + this field. + :param initial: Initial value to pass to factory if no value is given + for the field. + + :return: A ``field`` containing a ``CheckedPSet`` of the given type. + """ + return _sequence_field(CheckedPSet, item_type, optional, initial) + + +def pvector_field(item_type, optional=False, initial=()): + """ + Create checked ``PVector`` field. + + :param item_type: The required type for the items in the vector. + :param optional: If true, ``None`` can be used as a value for + this field. + :param initial: Initial value to pass to factory if no value is given + for the field. + + :return: A ``field`` containing a ``CheckedPVector`` of the given type. + """ + return _sequence_field(CheckedPVector, item_type, optional, initial) + + +_valid = lambda item: (True, '') +_pmap_field_types = {} + +def _restore_pmap_field_pickle(key_type, value_type, data): + """Unpickling function for auto-generated PMap field types.""" + type_ = _pmap_field_types[(key_type, value_type)] + return _restore_pickle(type_, data) + + +def _make_pmap_field_type(key_type, value_type): + """Create a subclass of CheckedPMap with the given key and value types.""" + type_ = _pmap_field_types.get((key_type, value_type)) + if type_ is not None: + return type_ + + class TheMap(CheckedPMap): + __key_type__ = key_type + __value_type__ = value_type + + def __reduce__(self): + return ( + _restore_pmap_field_pickle, + ( + self.__key_type__, self.__value_type__, dict(self))) + + TheMap.__name__ = "{0}To{1}PMap".format(_types_to_names(TheMap._checked_key_types), _types_to_names(TheMap._checked_value_types)) + _pmap_field_types[(key_type, value_type)] = TheMap + return TheMap + + +def pmap_field(key_type, value_type, optional=False, invariant=PFIELD_NO_INVARIANT): + """ + Create a checked ``PMap`` field. + + :param key: The required type for the keys of the map. + :param value: The required type for the values of the map. 
+ :param optional: If true, ``None`` can be used as a value for + this field. + :param invariant: Pass-through to ``field``. + + :return: A ``field`` containing a ``CheckedPMap``. + """ + TheMap = _make_pmap_field_type(key_type, value_type) + if optional: + + def factory(argument): + if argument is None: + return + return TheMap.create(argument) + + else: + factory = TheMap.create + return field(mandatory=True, initial=(TheMap()), type=(optional_type(TheMap) if optional else TheMap), + factory=factory, + invariant=invariant) diff --git a/APPS_UNCOMPILED/lib/pyrsistent/_helpers.py b/APPS_UNCOMPILED/lib/pyrsistent/_helpers.py new file mode 100644 index 0000000..ef43663 --- /dev/null +++ b/APPS_UNCOMPILED/lib/pyrsistent/_helpers.py @@ -0,0 +1,89 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pyrsistent/_helpers.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 2470 bytes +from functools import wraps +import six +from pyrsistent._pmap import PMap, pmap +from pyrsistent._pset import PSet, pset +from pyrsistent._pvector import PVector, pvector + +def freeze(o): + """ + Recursively convert simple Python containers into pyrsistent versions + of those containers. + + - list is converted to pvector, recursively + - dict is converted to pmap, recursively on values (but not keys) + - set is converted to pset, but not recursively + - tuple is converted to tuple, recursively. + + Sets and dict keys are not recursively frozen because they do not contain + mutable data by convention. The main exception to this rule is that + dict keys and set elements are often instances of mutable objects that + support hash-by-id, which this function can't convert anyway. 
+ + >>> freeze(set([1, 2])) + pset([1, 2]) + >>> freeze([1, {'a': 3}]) + pvector([1, pmap({'a': 3})]) + >>> freeze((1, [])) + (1, pvector([])) + """ + typ = type(o) + if typ is dict: + return pmap(dict(((k, freeze(v)) for k, v in six.iteritems(o)))) + if typ is list: + return pvector(map(freeze, o)) + if typ is tuple: + return tuple(map(freeze, o)) + if typ is set: + return pset(o) + return o + + +def thaw(o): + """ + Recursively convert pyrsistent containers into simple Python containers. + + - pvector is converted to list, recursively + - pmap is converted to dict, recursively on values (but not keys) + - pset is converted to set, but not recursively + - tuple is converted to tuple, recursively. + + >>> from pyrsistent import s, m, v + >>> thaw(s(1, 2)) + {1, 2} + >>> thaw(v(1, m(a=3))) + [1, {'a': 3}] + >>> thaw((1, v())) + (1, []) + """ + if isinstance(o, PVector): + return list(map(thaw, o)) + if isinstance(o, PMap): + return dict(((k, thaw(v)) for k, v in o.iteritems())) + if isinstance(o, PSet): + return set(o) + if type(o) is tuple: + return tuple(map(thaw, o)) + return o + + +def mutant(fn): + """ + Convenience decorator to isolate mutation to within the decorated function (with respect + to the input arguments). + + All arguments to the decorated function will be frozen so that they are guaranteed not to change. + The return value is also frozen. 
+ """ + + @wraps(fn) + def inner_f(*args, **kwargs): + return freeze(fn(*[freeze(e) for e in args], **dict((freeze(item) for item in kwargs.items())))) + + return inner_f diff --git a/APPS_UNCOMPILED/lib/pyrsistent/_immutable.py b/APPS_UNCOMPILED/lib/pyrsistent/_immutable.py new file mode 100644 index 0000000..73cb75c --- /dev/null +++ b/APPS_UNCOMPILED/lib/pyrsistent/_immutable.py @@ -0,0 +1,85 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pyrsistent/_immutable.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 3559 bytes +import sys, six + +def immutable(members='', name='Immutable', verbose=False): + """ + Produces a class that either can be used standalone or as a base class for persistent classes. + + This is a thin wrapper around a named tuple. + + Constructing a type and using it to instantiate objects: + + >>> Point = immutable('x, y', name='Point') + >>> p = Point(1, 2) + >>> p2 = p.set(x=3) + >>> p + Point(x=1, y=2) + >>> p2 + Point(x=3, y=2) + + Inheriting from a constructed type. In this case no type name needs to be supplied: + + >>> class PositivePoint(immutable('x, y')): + ... __slots__ = tuple() + ... def __new__(cls, x, y): + ... if x > 0 and y > 0: + ... return super(PositivePoint, cls).__new__(cls, x, y) + ... raise Exception('Coordinates must be positive!') + ... + >>> p = PositivePoint(1, 2) + >>> p.set(x=3) + PositivePoint(x=3, y=2) + >>> p.set(y=-3) + Traceback (most recent call last): + Exception: Coordinates must be positive! + + The persistent class also supports the notion of frozen members. The value of a frozen member + cannot be updated. For example it could be used to implement an ID that should remain the same + over time. A frozen member is denoted by a trailing underscore. 
+ + >>> Point = immutable('x, y, id_', name='Point') + >>> p = Point(1, 2, id_=17) + >>> p.set(x=3) + Point(x=3, y=2, id_=17) + >>> p.set(id_=18) + Traceback (most recent call last): + AttributeError: Cannot set frozen members id_ + """ + if isinstance(members, six.string_types): + members = members.replace(",", " ").split() + else: + + def frozen_member_test(): + frozen_members = ["'%s'" % f for f in members if f.endswith("_")] + if frozen_members: + return "\n frozen_fields = fields_to_modify & set([{frozen_members}])\n if frozen_fields:\n raise AttributeError('Cannot set frozen members %s' % ', '.join(frozen_fields))\n ".format(frozen_members=(", ".join(frozen_members))) + return "" + + verbose_string = "" + if sys.version_info < (3, 7): + verbose_string = ", verbose={verbose}".format(verbose=verbose) + quoted_members = ", ".join(("'%s'" % m for m in members)) + template = '\nclass {class_name}(namedtuple(\'ImmutableBase\', [{quoted_members}]{verbose_string})):\n __slots__ = tuple()\n\n def __repr__(self):\n return super({class_name}, self).__repr__().replace(\'ImmutableBase\', self.__class__.__name__)\n\n def set(self, **kwargs):\n if not kwargs:\n return self\n\n fields_to_modify = set(kwargs.keys())\n if not fields_to_modify <= {member_set}:\n raise AttributeError("\'%s\' is not a member" % \', \'.join(fields_to_modify - {member_set}))\n\n {frozen_member_test}\n\n return self.__class__.__new__(self.__class__, *map(kwargs.pop, [{quoted_members}], self))\n'.format(quoted_members=quoted_members, member_set=("set([%s])" % quoted_members if quoted_members else "set()"), + frozen_member_test=(frozen_member_test()), + verbose_string=verbose_string, + class_name=name) + if verbose: + print(template) + from collections import namedtuple + namespace = dict(namedtuple=namedtuple, __name__="pyrsistent_immutable") + try: + six.exec_(template, namespace) + except SyntaxError as e: + try: + raise SyntaxError(e.message + ":\n" + template) + finally: + e = None + del e + + 
return namespace[name] diff --git a/APPS_UNCOMPILED/lib/pyrsistent/_pbag.py b/APPS_UNCOMPILED/lib/pyrsistent/_pbag.py new file mode 100644 index 0000000..99d034d --- /dev/null +++ b/APPS_UNCOMPILED/lib/pyrsistent/_pbag.py @@ -0,0 +1,251 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pyrsistent/_pbag.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 6754 bytes +from ._compat import Container, Iterable, Sized, Hashable +from functools import reduce +from pyrsistent._pmap import pmap + +def _add_to_counters(counters, element): + return counters.set(element, counters.get(element, 0) + 1) + + +class PBag(object): + __doc__ = "\n A persistent bag/multiset type.\n\n Requires elements to be hashable, and allows duplicates, but has no\n ordering. Bags are hashable.\n\n Do not instantiate directly, instead use the factory functions :py:func:`b`\n or :py:func:`pbag` to create an instance.\n\n Some examples:\n\n >>> s = pbag([1, 2, 3, 1])\n >>> s2 = s.add(4)\n >>> s3 = s2.remove(1)\n >>> s\n pbag([1, 1, 2, 3])\n >>> s2\n pbag([1, 1, 2, 3, 4])\n >>> s3\n pbag([1, 2, 3, 4])\n " + __slots__ = ('_counts', '__weakref__') + + def __init__(self, counts): + self._counts = counts + + def add(self, element): + """ + Add an element to the bag. + + >>> s = pbag([1]) + >>> s2 = s.add(1) + >>> s3 = s.add(2) + >>> s2 + pbag([1, 1]) + >>> s3 + pbag([1, 2]) + """ + return PBag(_add_to_counters(self._counts, element)) + + def update(self, iterable): + """ + Update bag with all elements in iterable. + + >>> s = pbag([1]) + >>> s.update([1, 2]) + pbag([1, 1, 2]) + """ + if iterable: + return PBag(reduce(_add_to_counters, iterable, self._counts)) + return self + + def remove(self, element): + """ + Remove an element from the bag. 
+ + >>> s = pbag([1, 1, 2]) + >>> s2 = s.remove(1) + >>> s3 = s.remove(2) + >>> s2 + pbag([1, 2]) + >>> s3 + pbag([1, 1]) + """ + if element not in self._counts: + raise KeyError(element) + else: + if self._counts[element] == 1: + newc = self._counts.remove(element) + else: + newc = self._counts.set(element, self._counts[element] - 1) + return PBag(newc) + + def count(self, element): + """ + Return the number of times an element appears. + + >>> pbag([]).count('non-existent') + 0 + >>> pbag([1, 1, 2]).count(1) + 2 + """ + return self._counts.get(element, 0) + + def __len__(self): + """ + Return the length including duplicates. + + >>> len(pbag([1, 1, 2])) + 3 + """ + return sum(self._counts.itervalues()) + + def __iter__(self): + """ + Return an iterator of all elements, including duplicates. + + >>> list(pbag([1, 1, 2])) + [1, 1, 2] + >>> list(pbag([1, 2])) + [1, 2] + """ + for elt, count in self._counts.iteritems(): + for i in range(count): + yield elt + + def __contains__(self, elt): + """ + Check if an element is in the bag. + + >>> 1 in pbag([1, 1, 2]) + True + >>> 0 in pbag([1, 2]) + False + """ + return elt in self._counts + + def __repr__(self): + return "pbag({0})".format(list(self)) + + def __eq__(self, other): + """ + Check if two bags are equivalent, honoring the number of duplicates, + and ignoring insertion order. + + >>> pbag([1, 1, 2]) == pbag([1, 2]) + False + >>> pbag([2, 1, 0]) == pbag([0, 1, 2]) + True + """ + if type(other) is not PBag: + raise TypeError("Can only compare PBag with PBags") + return self._counts == other._counts + + def __lt__(self, other): + raise TypeError("PBags are not orderable") + + __le__ = __lt__ + __gt__ = __lt__ + __ge__ = __lt__ + + def __add__(self, other): + """ + Combine elements from two PBags. 
+ + >>> pbag([1, 2, 2]) + pbag([2, 3, 3]) + pbag([1, 2, 2, 2, 3, 3]) + """ + if not isinstance(other, PBag): + return NotImplemented + result = self._counts.evolver() + for elem, other_count in other._counts.iteritems(): + result[elem] = self.count(elem) + other_count + + return PBag(result.persistent()) + + def __sub__(self, other): + """ + Remove elements from one PBag that are present in another. + + >>> pbag([1, 2, 2, 2, 3]) - pbag([2, 3, 3, 4]) + pbag([1, 2, 2]) + """ + if not isinstance(other, PBag): + return NotImplemented + result = self._counts.evolver() + for elem, other_count in other._counts.iteritems(): + newcount = self.count(elem) - other_count + if newcount > 0: + result[elem] = newcount + elif elem in self: + result.remove(elem) + + return PBag(result.persistent()) + + def __or__(self, other): + """ + Union: Keep elements that are present in either of two PBags. + + >>> pbag([1, 2, 2, 2]) | pbag([2, 3, 3]) + pbag([1, 2, 2, 2, 3, 3]) + """ + if not isinstance(other, PBag): + return NotImplemented + result = self._counts.evolver() + for elem, other_count in other._counts.iteritems(): + count = self.count(elem) + newcount = max(count, other_count) + result[elem] = newcount + + return PBag(result.persistent()) + + def __and__(self, other): + """ + Intersection: Only keep elements that are present in both PBags. + + >>> pbag([1, 2, 2, 2]) & pbag([2, 3, 3]) + pbag([2]) + """ + if not isinstance(other, PBag): + return NotImplemented + result = pmap().evolver() + for elem, count in self._counts.iteritems(): + newcount = min(count, other.count(elem)) + if newcount > 0: + result[elem] = newcount + + return PBag(result.persistent()) + + def __hash__(self): + """ + Hash based on value of elements. + + >>> m = pmap({pbag([1, 2]): "it's here!"}) + >>> m[pbag([2, 1])] + "it's here!" 
+ >>> pbag([1, 1, 2]) in m + False + """ + return hash(self._counts) + + +Container.register(PBag) +Iterable.register(PBag) +Sized.register(PBag) +Hashable.register(PBag) + +def b(*elements): + """ + Construct a persistent bag. + + Takes an arbitrary number of arguments to insert into the new persistent + bag. + + >>> b(1, 2, 3, 2) + pbag([1, 2, 2, 3]) + """ + return pbag(elements) + + +def pbag(elements): + """ + Convert an iterable to a persistent bag. + + Takes an iterable with elements to insert. + + >>> pbag([1, 2, 3, 2]) + pbag([1, 2, 2, 3]) + """ + if not elements: + return _EMPTY_PBAG + return PBag(reduce(_add_to_counters, elements, pmap())) + + +_EMPTY_PBAG = PBag(pmap()) diff --git a/APPS_UNCOMPILED/lib/pyrsistent/_pclass.py b/APPS_UNCOMPILED/lib/pyrsistent/_pclass.py new file mode 100644 index 0000000..1bc74c7 --- /dev/null +++ b/APPS_UNCOMPILED/lib/pyrsistent/_pclass.py @@ -0,0 +1,229 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pyrsistent/_pclass.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 9722 bytes +import six +from pyrsistent._checked_types import InvariantException, CheckedType, _restore_pickle, store_invariants +from pyrsistent._field_common import set_fields, check_type, is_field_ignore_extra_complaint, PFIELD_NO_INITIAL, serialize, check_global_invariants +from pyrsistent._transformations import transform + +def _is_pclass(bases): + return len(bases) == 1 and bases[0] == CheckedType + + +class PClassMeta(type): + + def __new__(mcs, name, bases, dct): + set_fields(dct, bases, name="_pclass_fields") + store_invariants(dct, bases, "_pclass_invariants", "__invariant__") + dct["__slots__"] = ('_pclass_frozen', ) + tuple((key for key in dct["_pclass_fields"])) + if _is_pclass(bases): + dct["__slots__"] += ('__weakref__', ) + return 
super(PClassMeta, mcs).__new__(mcs, name, bases, dct) + + +_MISSING_VALUE = object() + +def _check_and_set_attr(cls, field, name, value, result, invariant_errors): + check_type(cls, field, name, value) + is_ok, error_code = field.invariant(value) + if not is_ok: + invariant_errors.append(error_code) + else: + setattr(result, name, value) + + +@six.add_metaclass(PClassMeta) +class PClass(CheckedType): + __doc__ = "\n A PClass is a python class with a fixed set of specified fields. PClasses are declared as python classes inheriting\n from PClass. It is defined the same way that PRecords are and behaves like a PRecord in all aspects except that it\n is not a PMap and hence not a collection but rather a plain Python object.\n\n\n More documentation and examples of PClass usage is available at https://github.com/tobgu/pyrsistent\n " + + def __new__(cls, **kwargs): + result = super(PClass, cls).__new__(cls) + factory_fields = kwargs.pop("_factory_fields", None) + ignore_extra = kwargs.pop("ignore_extra", None) + missing_fields = [] + invariant_errors = [] + for name, field in cls._pclass_fields.items(): + if name in kwargs: + if factory_fields is None or name in factory_fields: + if is_field_ignore_extra_complaint(PClass, field, ignore_extra): + value = field.factory((kwargs[name]), ignore_extra=ignore_extra) + else: + value = field.factory(kwargs[name]) + else: + value = kwargs[name] + _check_and_set_attr(cls, field, name, value, result, invariant_errors) + del kwargs[name] + else: + if field.initial is not PFIELD_NO_INITIAL: + initial = field.initial() if callable(field.initial) else field.initial + _check_and_set_attr(cls, field, name, initial, result, invariant_errors) + + if invariant_errors or missing_fields: + raise InvariantException(tuple(invariant_errors), tuple(missing_fields), "Field invariant failed") + if kwargs: + raise AttributeError("'{0}' are not among the specified fields for {1}".format(", ".join(kwargs), cls.__name__)) + 
check_global_invariants(result, cls._pclass_invariants) + result._pclass_frozen = True + return result + + def set(self, *args, **kwargs): + """ + Set a field in the instance. Returns a new instance with the updated value. The original instance remains + unmodified. Accepts key-value pairs or single string representing the field name and a value. + + >>> from pyrsistent import PClass, field + >>> class AClass(PClass): + ... x = field() + ... + >>> a = AClass(x=1) + >>> a2 = a.set(x=2) + >>> a3 = a.set('x', 3) + >>> a + AClass(x=1) + >>> a2 + AClass(x=2) + >>> a3 + AClass(x=3) + """ + if args: + kwargs[args[0]] = args[1] + factory_fields = set(kwargs) + for key in self._pclass_fields: + if key not in kwargs: + value = getattr(self, key, _MISSING_VALUE) + if value is not _MISSING_VALUE: + kwargs[key] = value + + return (self.__class__)(_factory_fields=factory_fields, **kwargs) + + @classmethod + def createParse error at or near `LOAD_DICTCOMP' instruction at offset 22 + + def serialize(self, format=None): + """ + Serialize the current PClass using custom serializer functions for fields where + such have been supplied. + """ + result = {} + for name in self._pclass_fields: + value = getattr(self, name, _MISSING_VALUE) + if value is not _MISSING_VALUE: + result[name] = serialize(self._pclass_fields[name].serializer, format, value) + + return result + + def transform(self, *transformations): + """ + Apply transformations to the currency PClass. For more details on transformations see + the documentation for PMap. Transformations on PClasses do not support key matching + since the PClass is not a collection. Apart from that the transformations available + for other persistent types work as expected. 
+ """ + return transform(self, transformations) + + def __eq__(self, other): + if isinstance(other, self.__class__): + for name in self._pclass_fields: + if getattr(self, name, _MISSING_VALUE) != getattr(other, name, _MISSING_VALUE): + return False + + return True + return NotImplemented + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(tuple(((key, getattr(self, key, _MISSING_VALUE)) for key in self._pclass_fields))) + + def __setattr__(self, key, value): + if getattr(self, "_pclass_frozen", False): + raise AttributeError("Can't set attribute, key={0}, value={1}".format(key, value)) + super(PClass, self).__setattr__(key, value) + + def __delattr__(self, key): + raise AttributeError("Can't delete attribute, key={0}, use remove()".format(key)) + + def _to_dict(self): + result = {} + for key in self._pclass_fields: + value = getattr(self, key, _MISSING_VALUE) + if value is not _MISSING_VALUE: + result[key] = value + + return result + + def __repr__(self): + return "{0}({1})".format(self.__class__.__name__, ", ".join(("{0}={1}".format(k, repr(v)) for k, v in self._to_dict().items()))) + + def __reduce__(self): + data = dict(((key, getattr(self, key)) for key in self._pclass_fields if hasattr(self, key))) + return (_restore_pickle, (self.__class__, data)) + + def evolver(self): + """ + Returns an evolver for this object. + """ + return _PClassEvolver(self, self._to_dict()) + + def remove(self, name): + """ + Remove attribute given by name from the current instance. Raises AttributeError if the + attribute doesn't exist. 
+ """ + evolver = self.evolver() + del evolver[name] + return evolver.persistent() + + +class _PClassEvolver(object): + __slots__ = ('_pclass_evolver_original', '_pclass_evolver_data', '_pclass_evolver_data_is_dirty', + '_factory_fields') + + def __init__(self, original, initial_dict): + self._pclass_evolver_original = original + self._pclass_evolver_data = initial_dict + self._pclass_evolver_data_is_dirty = False + self._factory_fields = set() + + def __getitem__(self, item): + return self._pclass_evolver_data[item] + + def set(self, key, value): + if self._pclass_evolver_data.get(key, _MISSING_VALUE) is not value: + self._pclass_evolver_data[key] = value + self._factory_fields.add(key) + self._pclass_evolver_data_is_dirty = True + return self + + def __setitem__(self, key, value): + self.set(key, value) + + def remove(self, item): + if item in self._pclass_evolver_data: + del self._pclass_evolver_data[item] + self._factory_fields.discard(item) + self._pclass_evolver_data_is_dirty = True + return self + raise AttributeError(item) + + def __delitem__(self, item): + self.remove(item) + + def persistent(self): + if self._pclass_evolver_data_is_dirty: + return (self._pclass_evolver_original.__class__)(_factory_fields=self._factory_fields, **self._pclass_evolver_data) + return self._pclass_evolver_original + + def __setattr__(self, key, value): + if key not in self.__slots__: + self.set(key, value) + else: + super(_PClassEvolver, self).__setattr__(key, value) + + def __getattr__(self, item): + return self[item] \ No newline at end of file diff --git a/APPS_UNCOMPILED/lib/pyrsistent/_pdeque.py b/APPS_UNCOMPILED/lib/pyrsistent/_pdeque.py new file mode 100644 index 0000000..8b4f1bc --- /dev/null +++ b/APPS_UNCOMPILED/lib/pyrsistent/_pdeque.py @@ -0,0 +1,326 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: 
/var/user/app/device_supervisorbak/device_supervisor/lib/pyrsistent/_pdeque.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 12184 bytes +from ._compat import Sequence, Hashable +from itertools import islice, chain +from numbers import Integral +from pyrsistent._plist import plist + +class PDeque(object): + __doc__ = "\n Persistent double ended queue (deque). Allows quick appends and pops in both ends. Implemented\n using two persistent lists.\n\n A maximum length can be specified to create a bounded queue.\n\n Fully supports the Sequence and Hashable protocols including indexing and slicing but\n if you need fast random access go for the PVector instead.\n\n Do not instantiate directly, instead use the factory functions :py:func:`dq` or :py:func:`pdeque` to\n create an instance.\n\n Some examples:\n\n >>> x = pdeque([1, 2, 3])\n >>> x.left\n 1\n >>> x.right\n 3\n >>> x[0] == x.left\n True\n >>> x[-1] == x.right\n True\n >>> x.pop()\n pdeque([1, 2])\n >>> x.pop() == x[:-1]\n True\n >>> x.popleft()\n pdeque([2, 3])\n >>> x.append(4)\n pdeque([1, 2, 3, 4])\n >>> x.appendleft(4)\n pdeque([4, 1, 2, 3])\n\n >>> y = pdeque([1, 2, 3], maxlen=3)\n >>> y.append(4)\n pdeque([2, 3, 4], maxlen=3)\n >>> y.appendleft(4)\n pdeque([4, 1, 2], maxlen=3)\n " + __slots__ = ('_left_list', '_right_list', '_length', '_maxlen', '__weakref__') + + def __new__(cls, left_list, right_list, length, maxlen=None): + instance = super(PDeque, cls).__new__(cls) + instance._left_list = left_list + instance._right_list = right_list + instance._length = length + if maxlen is not None: + if not isinstance(maxlen, Integral): + raise TypeError("An integer is required as maxlen") + if maxlen < 0: + raise ValueError("maxlen must be non-negative") + instance._maxlen = maxlen + return instance + + @property + def right(self): + """ + Rightmost element in dqueue. 
+ """ + return PDeque._tip_from_lists(self._right_list, self._left_list) + + @property + def left(self): + """ + Leftmost element in dqueue. + """ + return PDeque._tip_from_lists(self._left_list, self._right_list) + + @staticmethod + def _tip_from_lists(primary_list, secondary_list): + if primary_list: + return primary_list.first + if secondary_list: + return secondary_list[-1] + raise IndexError("No elements in empty deque") + + def __iter__(self): + return chain(self._left_list, self._right_list.reverse()) + + def __repr__(self): + return "pdeque({0}{1})".format(list(self), ", maxlen={0}".format(self._maxlen) if self._maxlen is not None else "") + + __str__ = __repr__ + + @property + def maxlen(self): + """ + Maximum length of the queue. + """ + return self._maxlen + + def pop(self, count=1): + """ + Return new deque with rightmost element removed. Popping the empty queue + will return the empty queue. A optional count can be given to indicate the + number of elements to pop. Popping with a negative index is the same as + popleft. Executes in amortized O(k) where k is the number of elements to pop. + + >>> pdeque([1, 2]).pop() + pdeque([1]) + >>> pdeque([1, 2]).pop(2) + pdeque([]) + >>> pdeque([1, 2]).pop(-1) + pdeque([2]) + """ + if count < 0: + return self.popleft(-count) + new_right_list, new_left_list = PDeque._pop_lists(self._right_list, self._left_list, count) + return PDeque(new_left_list, new_right_list, max(self._length - count, 0), self._maxlen) + + def popleft(self, count=1): + """ + Return new deque with leftmost element removed. Otherwise functionally + equivalent to pop(). 
+ + >>> pdeque([1, 2]).popleft() + pdeque([2]) + """ + if count < 0: + return self.pop(-count) + new_left_list, new_right_list = PDeque._pop_lists(self._left_list, self._right_list, count) + return PDeque(new_left_list, new_right_list, max(self._length - count, 0), self._maxlen) + + @staticmethod + def _pop_lists(primary_list, secondary_list, count): + new_primary_list = primary_list + new_secondary_list = secondary_list + while not count > 0 or new_primary_list or new_secondary_list: + count -= 1 + if new_primary_list.rest: + new_primary_list = new_primary_list.rest + elif new_primary_list: + new_primary_list = new_secondary_list.reverse() + new_secondary_list = plist() + else: + new_primary_list = new_secondary_list.reverse().rest + new_secondary_list = plist() + + return ( + new_primary_list, new_secondary_list) + + def _is_empty(self): + return not self._left_list and not self._right_list + + def __lt__(self, other): + if not isinstance(other, PDeque): + return NotImplemented + return tuple(self) < tuple(other) + + def __eq__(self, other): + if not isinstance(other, PDeque): + return NotImplemented + if tuple(self) == tuple(other): + assert len(self) == len(other) + return True + return False + + def __hash__(self): + return hash(tuple(self)) + + def __len__(self): + return self._length + + def append(self, elem): + """ + Return new deque with elem as the rightmost element. + + >>> pdeque([1, 2]).append(3) + pdeque([1, 2, 3]) + """ + new_left_list, new_right_list, new_length = self._append(self._left_list, self._right_list, elem) + return PDeque(new_left_list, new_right_list, new_length, self._maxlen) + + def appendleft(self, elem): + """ + Return new deque with elem as the leftmost element. 
+ + >>> pdeque([1, 2]).appendleft(3) + pdeque([3, 1, 2]) + """ + new_right_list, new_left_list, new_length = self._append(self._right_list, self._left_list, elem) + return PDeque(new_left_list, new_right_list, new_length, self._maxlen) + + def _append(self, primary_list, secondary_list, elem): + if self._maxlen is not None: + if self._length == self._maxlen: + if self._maxlen == 0: + return ( + primary_list, secondary_list, 0) + new_primary_list, new_secondary_list = PDeque._pop_lists(primary_list, secondary_list, 1) + return (new_primary_list, new_secondary_list.cons(elem), self._length) + return ( + primary_list, secondary_list.cons(elem), self._length + 1) + + @staticmethod + def _extend_list(the_list, iterable): + count = 0 + for elem in iterable: + the_list = the_list.cons(elem) + count += 1 + + return (the_list, count) + + def _extend(self, primary_list, secondary_list, iterable): + new_primary_list, extend_count = PDeque._extend_list(primary_list, iterable) + new_secondary_list = secondary_list + current_len = self._length + extend_count + if self._maxlen is not None: + if current_len > self._maxlen: + pop_len = current_len - self._maxlen + new_secondary_list, new_primary_list = PDeque._pop_lists(new_secondary_list, new_primary_list, pop_len) + extend_count -= pop_len + return ( + new_primary_list, new_secondary_list, extend_count) + + def extend(self, iterable): + """ + Return new deque with all elements of iterable appended to the right. + + >>> pdeque([1, 2]).extend([3, 4]) + pdeque([1, 2, 3, 4]) + """ + new_right_list, new_left_list, extend_count = self._extend(self._right_list, self._left_list, iterable) + return PDeque(new_left_list, new_right_list, self._length + extend_count, self._maxlen) + + def extendleft(self, iterable): + """ + Return new deque with all elements of iterable appended to the left. + + NB! The elements will be inserted in reverse order compared to the order in the iterable. 
+ + >>> pdeque([1, 2]).extendleft([3, 4]) + pdeque([4, 3, 1, 2]) + """ + new_left_list, new_right_list, extend_count = self._extend(self._left_list, self._right_list, iterable) + return PDeque(new_left_list, new_right_list, self._length + extend_count, self._maxlen) + + def count(self, elem): + """ + Return the number of elements equal to elem present in the queue + + >>> pdeque([1, 2, 1]).count(1) + 2 + """ + return self._left_list.count(elem) + self._right_list.count(elem) + + def remove(self, elem): + """ + Return new deque with first element from left equal to elem removed. If no such element is found + a ValueError is raised. + + >>> pdeque([2, 1, 2]).remove(2) + pdeque([1, 2]) + """ + try: + return PDeque(self._left_list.remove(elem), self._right_list, self._length - 1) + except ValueError: + try: + return PDeque(self._left_list, self._right_list.reverse().remove(elem).reverse(), self._length - 1) + except ValueError: + raise ValueError("{0} not found in PDeque".format(elem)) + + def reverse(self): + """ + Return reversed deque. + + >>> pdeque([1, 2, 3]).reverse() + pdeque([3, 2, 1]) + + Also supports the standard python reverse function. + + >>> reversed(pdeque([1, 2, 3])) + pdeque([3, 2, 1]) + """ + return PDeque(self._right_list, self._left_list, self._length) + + __reversed__ = reverse + + def rotate(self, steps): + """ + Return deque with elements rotated steps steps. 
def pdeque(iterable=(), maxlen=None):
    """
    Return deque containing the elements of iterable. If maxlen is specified then
    len(iterable) - maxlen elements are discarded from the left to if len(iterable) > maxlen.

    >>> pdeque([1, 2, 3])
    pdeque([1, 2, 3])
    >>> pdeque([1, 2, 3, 4], maxlen=2)
    pdeque([3, 4], maxlen=2)
    """
    t = tuple(iterable)
    if maxlen is not None:
        # Keep only the rightmost maxlen elements, matching collections.deque.
        # (Decompiler artifact fixed: ``t[(-maxlen)[:None]]`` -> ``t[-maxlen:]``.)
        # NOTE(review): ``t[-0:]`` returns the whole tuple, so maxlen=0 with a
        # non-empty iterable is not truncated here — same as upstream; confirm
        # against PDeque's maxlen handling if this matters to callers.
        t = t[-maxlen:]
    length = len(t)
    # Split roughly in half: left half stored front-to-back, right half
    # stored reversed so that both ends are O(1)-ish to access.
    pivot = int(length / 2)
    left = plist(t[:pivot])
    right = plist(t[pivot:], reverse=True)
    return PDeque(left, right, length, maxlen)
+ + >>> dq(1, 2, 3) + pdeque([1, 2, 3]) + """ + return pdeque(elements) diff --git a/APPS_UNCOMPILED/lib/pyrsistent/_plist.py b/APPS_UNCOMPILED/lib/pyrsistent/_plist.py new file mode 100644 index 0000000..28b3dbf --- /dev/null +++ b/APPS_UNCOMPILED/lib/pyrsistent/_plist.py @@ -0,0 +1,273 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pyrsistent/_plist.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 8282 bytes +from ._compat import Sequence, Hashable +from numbers import Integral +from functools import reduce + +class _PListBuilder(object): + __doc__ = "\n Helper class to allow construction of a list without\n having to reverse it in the end.\n " + __slots__ = ('_head', '_tail') + + def __init__(self): + self._head = _EMPTY_PLIST + self._tail = _EMPTY_PLIST + + def _append(self, elem, constructor): + if not self._tail: + self._head = constructor(elem) + self._tail = self._head + else: + self._tail.rest = constructor(elem) + self._tail = self._tail.rest + return self._head + + def append_elem(self, elem): + return self._append(elem, lambda e: PList(e, _EMPTY_PLIST)) + + def append_plist(self, pl): + return self._append(pl, lambda l: l) + + def build(self): + return self._head + + +class _PListBase(object): + __slots__ = ('__weakref__', ) + count = Sequence.count + index = Sequence.index + + def __reduce__(self): + return ( + plist, (list(self),)) + + def __len__(self): + """ + Return the length of the list, computed by traversing it. + + This is obviously O(n) but with the current implementation + where a list is also a node the overhead of storing the length + in every node would be quite significant. 
+ """ + return sum((1 for _ in self)) + + def __repr__(self): + return "plist({0})".format(list(self)) + + __str__ = __repr__ + + def cons(self, elem): + """ + Return a new list with elem inserted as new head. + + >>> plist([1, 2]).cons(3) + plist([3, 1, 2]) + """ + return PList(elem, self) + + def mcons(self, iterable): + """ + Return a new list with all elements of iterable repeatedly cons:ed to the current list. + NB! The elements will be inserted in the reverse order of the iterable. + Runs in O(len(iterable)). + + >>> plist([1, 2]).mcons([3, 4]) + plist([4, 3, 1, 2]) + """ + head = self + for elem in iterable: + head = head.cons(elem) + + return head + + def reverse(self): + """ + Return a reversed version of list. Runs in O(n) where n is the length of the list. + + >>> plist([1, 2, 3]).reverse() + plist([3, 2, 1]) + + Also supports the standard reversed function. + + >>> reversed(plist([1, 2, 3])) + plist([3, 2, 1]) + """ + result = plist() + head = self + while head: + result = result.cons(head.first) + head = head.rest + + return result + + __reversed__ = reverse + + def split(self, index): + """ + Spilt the list at position specified by index. Returns a tuple containing the + list up until index and the list after the index. Runs in O(index). + + >>> plist([1, 2, 3, 4]).split(2) + (plist([1, 2]), plist([3, 4])) + """ + lb = _PListBuilder() + right_list = self + i = 0 + while right_list and i < index: + lb.append_elem(right_list.first) + right_list = right_list.rest + i += 1 + + if not right_list: + return (self, _EMPTY_PLIST) + return (lb.build(), right_list) + + def __iter__(self): + li = self + while li: + yield li.first + li = li.rest + + def __lt__(self, other): + if not isinstance(other, _PListBase): + return NotImplemented + return tuple(self) < tuple(other) + + def __eq__(self, other): + """ + Traverses the lists, checking equality of elements. + + This is an O(n) operation, but preserves the standard semantics of list equality. 
+ """ + if not isinstance(other, _PListBase): + return NotImplemented + self_head = self + other_head = other + while self_head and other_head: + if not self_head.first == other_head.first: + return False + self_head = self_head.rest + other_head = other_head.rest + + return not self_head and not other_head + + def __getitem__(self, index): + if isinstance(index, slice): + if index.start is not None: + if index.stop is None: + if index.step is None or index.step == 1: + return self._drop(index.start) + else: + return plist(tuple(self)[index]) + if not isinstance(index, Integral): + raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__) + if index < 0: + index += len(self) + try: + return self._drop(index).first + except AttributeError: + raise IndexError("PList index out of range") + + def _drop(self, count): + if count < 0: + raise IndexError("PList index out of range") + head = self + while count > 0: + head = head.rest + count -= 1 + + return head + + def __hash__(self): + return hash(tuple(self)) + + def remove(self, elem): + """ + Return new list with first element equal to elem removed. O(k) where k is the position + of the element that is removed. + + Raises ValueError if no matching element is found. + + >>> plist([1, 2, 1]).remove(1) + plist([2, 1]) + """ + builder = _PListBuilder() + head = self + while head: + if head.first == elem: + return builder.append_plist(head.rest) + builder.append_elem(head.first) + head = head.rest + + raise ValueError("{0} not found in PList".format(elem)) + + +class PList(_PListBase): + __doc__ = "\n Classical Lisp style singly linked list. Adding elements to the head using cons is O(1).\n Element access is O(k) where k is the position of the element in the list. 
Taking the\n length of the list is O(n).\n\n Fully supports the Sequence and Hashable protocols including indexing and slicing but\n if you need fast random access go for the PVector instead.\n\n Do not instantiate directly, instead use the factory functions :py:func:`l` or :py:func:`plist` to\n create an instance.\n\n Some examples:\n\n >>> x = plist([1, 2])\n >>> y = x.cons(3)\n >>> x\n plist([1, 2])\n >>> y\n plist([3, 1, 2])\n >>> y.first\n 3\n >>> y.rest == x\n True\n >>> y[:2]\n plist([3, 1])\n " + __slots__ = ('first', 'rest') + + def __new__(cls, first, rest): + instance = super(PList, cls).__new__(cls) + instance.first = first + instance.rest = rest + return instance + + def __bool__(self): + return True + + __nonzero__ = __bool__ + + +Sequence.register(PList) +Hashable.register(PList) + +class _EmptyPList(_PListBase): + __slots__ = () + + def __bool__(self): + return False + + __nonzero__ = __bool__ + + @property + def first(self): + raise AttributeError("Empty PList has no first") + + @property + def rest(self): + return self + + +Sequence.register(_EmptyPList) +Hashable.register(_EmptyPList) +_EMPTY_PLIST = _EmptyPList() + +def plist(iterable=(), reverse=False): + """ + Creates a new persistent list containing all elements of iterable. + Optional parameter reverse specifies if the elements should be inserted in + reverse order or not. + + >>> plist([1, 2, 3]) + plist([1, 2, 3]) + >>> plist([1, 2, 3], reverse=True) + plist([3, 2, 1]) + """ + if not reverse: + iterable = list(iterable) + iterable.reverse() + return reduce((lambda pl, elem: pl.cons(elem)), iterable, _EMPTY_PLIST) + + +def l(*elements): + """ + Creates a new persistent list containing all arguments. 
+ + >>> l(1, 2, 3) + plist([1, 2, 3]) + """ + return plist(elements) diff --git a/APPS_UNCOMPILED/lib/pyrsistent/_pmap.py b/APPS_UNCOMPILED/lib/pyrsistent/_pmap.py new file mode 100644 index 0000000..e6bebaa --- /dev/null +++ b/APPS_UNCOMPILED/lib/pyrsistent/_pmap.py @@ -0,0 +1,416 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pyrsistent/_pmap.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 14643 bytes +from ._compat import Mapping, Hashable +from itertools import chain +import six +from pyrsistent._pvector import pvector +from pyrsistent._transformations import transform + +class PMap(object): + __doc__ = "\n Persistent map/dict. Tries to follow the same naming conventions as the built in dict where feasible.\n\n Do not instantiate directly, instead use the factory functions :py:func:`m` or :py:func:`pmap` to\n create an instance.\n\n Was originally written as a very close copy of the Clojure equivalent but was later rewritten to closer\n re-assemble the python dict. This means that a sparse vector (a PVector) of buckets is used. The keys are\n hashed and the elements inserted at position hash % len(bucket_vector). Whenever the map size exceeds 2/3 of\n the containing vectors size the map is reallocated to a vector of double the size. This is done to avoid\n excessive hash collisions.\n\n This structure corresponds most closely to the built in dict type and is intended as a replacement. Where the\n semantics are the same (more or less) the same function names have been used but for some cases it is not possible,\n for example assignments and deletion of values.\n\n PMap implements the Mapping protocol and is Hashable. 
It also supports dot-notation for\n element access.\n\n Random access and insert is log32(n) where n is the size of the map.\n\n The following are examples of some common operations on persistent maps\n\n >>> m1 = m(a=1, b=3)\n >>> m2 = m1.set('c', 3)\n >>> m3 = m2.remove('a')\n >>> m1\n pmap({'b': 3, 'a': 1})\n >>> m2\n pmap({'c': 3, 'b': 3, 'a': 1})\n >>> m3\n pmap({'c': 3, 'b': 3})\n >>> m3['c']\n 3\n >>> m3.c\n 3\n " + __slots__ = ('_size', '_buckets', '__weakref__', '_cached_hash') + + def __new__(cls, size, buckets): + self = super(PMap, cls).__new__(cls) + self._size = size + self._buckets = buckets + return self + + @staticmethod + def _get_bucket(buckets, key): + index = hash(key) % len(buckets) + bucket = buckets[index] + return (index, bucket) + + @staticmethod + def _getitem(buckets, key): + _, bucket = PMap._get_bucket(buckets, key) + if bucket: + for k, v in bucket: + if k == key: + return v + + raise KeyError(key) + + def __getitem__(self, key): + return PMap._getitem(self._buckets, key) + + @staticmethod + def _contains(buckets, key): + _, bucket = PMap._get_bucket(buckets, key) + if bucket: + for k, _ in bucket: + if k == key: + return True + + return False + return False + + def __contains__(self, key): + return self._contains(self._buckets, key) + + get = Mapping.get + + def __iter__(self): + return self.iterkeys() + + def __getattr__(self, key): + try: + return self[key] + except KeyError: + raise AttributeError("{0} has no attribute '{1}'".format(type(self).__name__, key)) + + def iterkeys(self): + for k, _ in self.iteritems(): + yield k + + def itervalues(self): + for _, v in self.iteritems(): + yield v + + def iteritems(self): + for bucket in self._buckets: + if bucket: + for k, v in bucket: + yield ( + k, v) + + def values(self): + return pvector(self.itervalues()) + + def keys(self): + return pvector(self.iterkeys()) + + def items(self): + return pvector(self.iteritems()) + + def __len__(self): + return self._size + + def __repr__(self): + 
return "pmap({0})".format(str(dict(self))) + + def __eq__(self, other): + if self is other: + return True + else: + return isinstance(other, Mapping) or NotImplemented + if len(self) != len(other): + return False + if isinstance(other, PMap): + if hasattr(self, "_cached_hash"): + if hasattr(other, "_cached_hash"): + if self._cached_hash != other._cached_hash: + return False + if self._buckets == other._buckets: + return True + return dict(self.iteritems()) == dict(other.iteritems()) + if isinstance(other, dict): + return dict(self.iteritems()) == other + return dict(self.iteritems()) == dict(six.iteritems(other)) + + __ne__ = Mapping.__ne__ + + def __lt__(self, other): + raise TypeError("PMaps are not orderable") + + __le__ = __lt__ + __gt__ = __lt__ + __ge__ = __lt__ + + def __str__(self): + return self.__repr__() + + def __hash__(self): + if not hasattr(self, "_cached_hash"): + self._cached_hash = hash(frozenset(self.iteritems())) + return self._cached_hash + + def set(self, key, val): + """ + Return a new PMap with key and val inserted. + + >>> m1 = m(a=1, b=2) + >>> m2 = m1.set('a', 3) + >>> m3 = m1.set('c' ,4) + >>> m1 + pmap({'b': 2, 'a': 1}) + >>> m2 + pmap({'b': 2, 'a': 3}) + >>> m3 + pmap({'c': 4, 'b': 2, 'a': 1}) + """ + return self.evolver().set(key, val).persistent() + + def remove(self, key): + """ + Return a new PMap without the element specified by key. Raises KeyError if the element + is not present. + + >>> m1 = m(a=1, b=2) + >>> m1.remove('a') + pmap({'b': 2}) + """ + return self.evolver().remove(key).persistent() + + def discard(self, key): + """ + Return a new PMap without the element specified by key. Returns reference to itself + if element is not present. + + >>> m1 = m(a=1, b=2) + >>> m1.discard('a') + pmap({'b': 2}) + >>> m1 is m1.discard('c') + True + """ + try: + return self.remove(key) + except KeyError: + return self + + def update(self, *maps): + """ + Return a new PMap with the items in Mappings inserted. 
If the same key is present in multiple + maps the rightmost (last) value is inserted. + + >>> m1 = m(a=1, b=2) + >>> m1.update(m(a=2, c=3), {'a': 17, 'd': 35}) + pmap({'c': 3, 'b': 2, 'a': 17, 'd': 35}) + """ + return (self.update_with)(lambda l, r: r, *maps) + + def update_with(self, update_fn, *maps): + """ + Return a new PMap with the items in Mappings maps inserted. If the same key is present in multiple + maps the values will be merged using merge_fn going from left to right. + + >>> from operator import add + >>> m1 = m(a=1, b=2) + >>> m1.update_with(add, m(a=2)) + pmap({'b': 2, 'a': 3}) + + The reverse behaviour of the regular merge. Keep the leftmost element instead of the rightmost. + + >>> m1 = m(a=1) + >>> m1.update_with(lambda l, r: l, m(a=2), {'a':3}) + pmap({'a': 1}) + """ + evolver = self.evolver() + for map in maps: + for key, value in map.items(): + evolver.set(key, update_fn(evolver[key], value) if key in evolver else value) + + return evolver.persistent() + + def __add__(self, other): + return self.update(other) + + def __reduce__(self): + return ( + pmap, (dict(self),)) + + def transform(self, *transformations): + """ + Transform arbitrarily complex combinations of PVectors and PMaps. A transformation + consists of two parts. One match expression that specifies which elements to transform + and one transformation function that performs the actual transformation. + + >>> from pyrsistent import freeze, ny + >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'}, + ... {'author': 'Steve', 'content': 'A slightly longer article'}], + ... 'weather': {'temperature': '11C', 'wind': '5m/s'}}) + >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c) + >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' 
if len(c) > 15 else c) + >>> very_short_news.articles[0].content + 'A short article' + >>> very_short_news.articles[1].content + 'A slightly long...' + + When nothing has been transformed the original data structure is kept + + >>> short_news is news_paper + True + >>> very_short_news is news_paper + False + >>> very_short_news.articles[0] is news_paper.articles[0] + True + """ + return transform(self, transformations) + + def copy(self): + return self + + class _Evolver(object): + __slots__ = ('_buckets_evolver', '_size', '_original_pmap') + + def __init__(self, original_pmap): + self._original_pmap = original_pmap + self._buckets_evolver = original_pmap._buckets.evolver() + self._size = original_pmap._size + + def __getitem__(self, key): + return PMap._getitem(self._buckets_evolver, key) + + def __setitem__(self, key, val): + self.set(key, val) + + def set(self, key, val): + if len(self._buckets_evolver) < 0.67 * self._size: + self._reallocate(2 * len(self._buckets_evolver)) + else: + kv = ( + key, val) + index, bucket = PMap._get_bucket(self._buckets_evolver, key) + if bucket: + for k, v in bucket: + if k == key: + if v is not val: + new_bucket = [(k2, v2) if k2 != k else (k2, val) for k2, v2 in bucket] + self._buckets_evolver[index] = new_bucket + return self + + new_bucket = [ + kv] + new_bucket.extend(bucket) + self._buckets_evolver[index] = new_bucket + self._size += 1 + else: + self._buckets_evolver[index] = [ + kv] + self._size += 1 + return self + + def _reallocate(self, new_size): + new_list = new_size * [None] + buckets = self._buckets_evolver.persistent() + for k, v in chain.from_iterable((x for x in buckets if x)): + index = hash(k) % new_size + if new_list[index]: + new_list[index].append((k, v)) + else: + new_list[index] = [ + ( + k, v)] + + self._buckets_evolver = pvector().evolver() + self._buckets_evolver.extend(new_list) + + def is_dirty(self): + return self._buckets_evolver.is_dirty() + + def persistent(self): + if self.is_dirty(): + 
self._original_pmap = PMap(self._size, self._buckets_evolver.persistent()) + return self._original_pmap + + def __len__(self): + return self._size + + def __contains__(self, key): + return PMap._contains(self._buckets_evolver, key) + + def __delitem__(self, key): + self.remove(key) + + def remove(self, key): + index, bucket = PMap._get_bucket(self._buckets_evolver, key) + if bucket: + new_bucket = [(k, v) for k, v in bucket if k != key] + if len(bucket) > len(new_bucket): + self._buckets_evolver[index] = new_bucket if new_bucket else None + self._size -= 1 + return self + raise KeyError("{0}".format(key)) + + def evolver(self): + """ + Create a new evolver for this pmap. For a discussion on evolvers in general see the + documentation for the pvector evolver. + + Create the evolver and perform various mutating updates to it: + + >>> m1 = m(a=1, b=2) + >>> e = m1.evolver() + >>> e['c'] = 3 + >>> len(e) + 3 + >>> del e['a'] + + The underlying pmap remains the same: + + >>> m1 + pmap({'b': 2, 'a': 1}) + + The changes are kept in the evolver. An updated pmap can be created using the + persistent() function on the evolver. + + >>> m2 = e.persistent() + >>> m2 + pmap({'c': 3, 'b': 2}) + + The new pmap will share data with the original pmap in the same way that would have + been done if only using operations on the pmap. 
+ """ + return self._Evolver(self) + + +Mapping.register(PMap) +Hashable.register(PMap) + +def _turbo_mapping(initial, pre_size): + if pre_size: + size = pre_size + else: + try: + size = 2 * len(initial) or 8 + except Exception: + size = 8 + + buckets = size * [None] + if not isinstance(initial, Mapping): + initial = dict(initial) + for k, v in six.iteritems(initial): + h = hash(k) + index = h % size + bucket = buckets[index] + if bucket: + bucket.append((k, v)) + else: + buckets[index] = [ + ( + k, v)] + + return PMap(len(initial), pvector().extend(buckets)) + + +_EMPTY_PMAP = _turbo_mapping({}, 0) + +def pmap(initial={}, pre_size=0): + """ + Create new persistent map, inserts all elements in initial into the newly created map. + The optional argument pre_size may be used to specify an initial size of the underlying bucket vector. This + may have a positive performance impact in the cases where you know beforehand that a large number of elements + will be inserted into the map eventually since it will reduce the number of reallocations required. + + >>> pmap({'a': 13, 'b': 14}) + pmap({'b': 14, 'a': 13}) + """ + if not initial: + return _EMPTY_PMAP + return _turbo_mapping(initial, pre_size) + + +def m(**kwargs): + """ + Creates a new persitent map. Inserts all key value arguments into the newly created map. 
+ + >>> m(a=13, b=14) + pmap({'b': 14, 'a': 13}) + """ + return pmap(kwargs) diff --git a/APPS_UNCOMPILED/lib/pyrsistent/_precord.py b/APPS_UNCOMPILED/lib/pyrsistent/_precord.py new file mode 100644 index 0000000..eb80a1d --- /dev/null +++ b/APPS_UNCOMPILED/lib/pyrsistent/_precord.py @@ -0,0 +1,134 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pyrsistent/_precord.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 7019 bytes +import six +from pyrsistent._checked_types import CheckedType, _restore_pickle, InvariantException, store_invariants +from pyrsistent._field_common import set_fields, check_type, is_field_ignore_extra_complaint, PFIELD_NO_INITIAL, serialize, check_global_invariants +from pyrsistent._pmap import PMap, pmap + +class _PRecordMeta(type): + + def __new__(mcs, name, bases, dct): + set_fields(dct, bases, name="_precord_fields") + store_invariants(dct, bases, "_precord_invariants", "__invariant__") + dct["_precord_mandatory_fields"] = set((name for name, field in dct["_precord_fields"].items() if field.mandatory)) + dct["_precord_initial_values"] = dict(((k, field.initial) for k, field in dct["_precord_fields"].items() if field.initial is not PFIELD_NO_INITIAL)) + dct["__slots__"] = () + return super(_PRecordMeta, mcs).__new__(mcs, name, bases, dct) + + +@six.add_metaclass(_PRecordMeta) +class PRecord(PMap, CheckedType): + __doc__ = "\n A PRecord is a PMap with a fixed set of specified fields. Records are declared as python classes inheriting\n from PRecord. 
Because it is a PMap it has full support for all Mapping methods such as iteration and element\n access using subscript notation.\n\n More documentation and examples of PRecord usage is available at https://github.com/tobgu/pyrsistent\n " + + def __new__(cls, **kwargs): + if "_precord_size" in kwargs: + if "_precord_buckets" in kwargs: + return super(PRecord, cls).__new__(cls, kwargs["_precord_size"], kwargs["_precord_buckets"]) + factory_fields = kwargs.pop("_factory_fields", None) + ignore_extra = kwargs.pop("_ignore_extra", False) + initial_values = kwargs + if cls._precord_initial_values: + initial_values = dict(((k, v() if callable(v) else v) for k, v in cls._precord_initial_values.items())) + initial_values.update(kwargs) + e = _PRecordEvolver(cls, (pmap()), _factory_fields=factory_fields, _ignore_extra=ignore_extra) + for k, v in initial_values.items(): + e[k] = v + + return e.persistent() + + def set(self, *args, **kwargs): + """ + Set a field in the record. This set function differs slightly from that in the PMap + class. First of all it accepts key-value pairs. Second it accepts multiple key-value + pairs to perform one, atomic, update of multiple fields. + """ + if args: + return super(PRecord, self).set(args[0], args[1]) + return self.update(kwargs) + + def evolver(self): + """ + Returns an evolver of this object. + """ + return _PRecordEvolver(self.__class__, self) + + def __repr__(self): + return "{0}({1})".format(self.__class__.__name__, ", ".join(("{0}={1}".format(k, repr(v)) for k, v in self.items()))) + + @classmethod + def createParse error at or near `LOAD_DICTCOMP' instruction at offset 22 + + def __reduce__(self): + return ( + _restore_pickle, (self.__class__, dict(self))) + + def serialize(self, format=None): + """ + Serialize the current PRecord using custom serializer functions for fields where + such have been supplied. 
+ """ + return dict(((k, serialize(self._precord_fields[k].serializer, format, v)) for k, v in self.items())) + + +class _PRecordEvolver(PMap._Evolver): + __slots__ = ('_destination_cls', '_invariant_error_codes', '_missing_fields', '_factory_fields', + '_ignore_extra') + + def __init__(self, cls, original_pmap, _factory_fields=None, _ignore_extra=False): + super(_PRecordEvolver, self).__init__(original_pmap) + self._destination_cls = cls + self._invariant_error_codes = [] + self._missing_fields = [] + self._factory_fields = _factory_fields + self._ignore_extra = _ignore_extra + + def __setitem__(self, key, original_value): + self.set(key, original_value) + + def set(self, key, original_value): + field = self._destination_cls._precord_fields.get(key) + if field: + if self._factory_fields is None or field in self._factory_fields: + try: + if is_field_ignore_extra_complaint(PRecord, field, self._ignore_extra): + value = field.factory(original_value, ignore_extra=(self._ignore_extra)) + else: + value = field.factory(original_value) + except InvariantException as e: + try: + self._invariant_error_codes += e.invariant_errors + self._missing_fields += e.missing_fields + return self + finally: + e = None + del e + + else: + value = original_value + check_type(self._destination_cls, field, key, value) + is_ok, error_code = field.invariant(value) + if not is_ok: + self._invariant_error_codes.append(error_code) + return super(_PRecordEvolver, self).set(key, value) + raise AttributeError("'{0}' is not among the specified fields for {1}".format(key, self._destination_cls.__name__)) + + def persistent(self): + cls = self._destination_cls + is_dirty = self.is_dirty() + pm = super(_PRecordEvolver, self).persistent() + if not is_dirty: + if not isinstance(pm, cls): + result = cls(_precord_buckets=(pm._buckets), _precord_size=(pm._size)) + else: + result = pm + if cls._precord_mandatory_fields: + self._missing_fields += tuple(("{0}.{1}".format(cls.__name__, f) for f in 
cls._precord_mandatory_fields - set(result.keys()))) + if self._invariant_error_codes or self._missing_fields: + raise InvariantException(tuple(self._invariant_error_codes), tuple(self._missing_fields), "Field invariant failed") + check_global_invariants(result, cls._precord_invariants) + return result \ No newline at end of file diff --git a/APPS_UNCOMPILED/lib/pyrsistent/_pset.py b/APPS_UNCOMPILED/lib/pyrsistent/_pset.py new file mode 100644 index 0000000..91fc530 --- /dev/null +++ b/APPS_UNCOMPILED/lib/pyrsistent/_pset.py @@ -0,0 +1,200 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pyrsistent/_pset.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 5718 bytes +from ._compat import Set, Hashable +import sys +from pyrsistent._pmap import pmap +PY2 = sys.version_info[0] < 3 + +class PSet(object): + __doc__ = "\n Persistent set implementation. Built on top of the persistent map. 
The set supports all operations\n in the Set protocol and is Hashable.\n\n Do not instantiate directly, instead use the factory functions :py:func:`s` or :py:func:`pset`\n to create an instance.\n\n Random access and insert is log32(n) where n is the size of the set.\n\n Some examples:\n\n >>> s = pset([1, 2, 3, 1])\n >>> s2 = s.add(4)\n >>> s3 = s2.remove(2)\n >>> s\n pset([1, 2, 3])\n >>> s2\n pset([1, 2, 3, 4])\n >>> s3\n pset([1, 3, 4])\n "
+    __slots__ = ('_map', '__weakref__')
+
+    def __new__(cls, m):
+        self = super(PSet, cls).__new__(cls)
+        self._map = m
+        return self
+
+    def __contains__(self, element):
+        return element in self._map
+
+    def __iter__(self):
+        return iter(self._map)
+
+    def __len__(self):
+        return len(self._map)
+
+    def __repr__(self):
+        # NOTE(review): decompiler garbled this into "return PY2 or self or ..."
+        # followed by an unreachable return with a broken slice ([1[:-1]]);
+        # restored the upstream two-branch form and the [1:-1] slice.
+        if PY2 or not self:
+            return "p" + str(set(self))
+        return "pset([{0}])".format(str(set(self))[1:-1])
+
+    def __str__(self):
+        return self.__repr__()
+
+    def __hash__(self):
+        return hash(self._map)
+
+    def __reduce__(self):
+        return (
+            pset, (list(self),))
+
+    @classmethod
+    def _from_iterable(cls, it, pre_size=8):
+        return PSet(pmap((dict(((k, True) for k in it))), pre_size=pre_size))
+
+    def add(self, element):
+        """
+        Return a new PSet with element added
+
+        >>> s1 = s(1, 2)
+        >>> s1.add(3)
+        pset([1, 2, 3])
+        """
+        return self.evolver().add(element).persistent()
+
+    def update(self, iterable):
+        """
+        Return a new PSet with elements in iterable added
+
+        >>> s1 = s(1, 2)
+        >>> s1.update([3, 4, 4])
+        pset([1, 2, 3, 4])
+        """
+        e = self.evolver()
+        for element in iterable:
+            e.add(element)
+
+        return e.persistent()
+
+    def remove(self, element):
+        """
+        Return a new PSet with element removed. Raises KeyError if element is not present.
+
+        >>> s1 = s(1, 2)
+        >>> s1.remove(2)
+        pset([1])
+        """
+        if element in self._map:
+            return self.evolver().remove(element).persistent()
+        raise KeyError("Element '%s' not present in PSet" % element)
+
+    def discard(self, element):
+        """
+        Return a new PSet with element removed.
Returns itself if element is not present. + """ + if element in self._map: + return self.evolver().remove(element).persistent() + return self + + class _Evolver(object): + __slots__ = ('_original_pset', '_pmap_evolver') + + def __init__(self, original_pset): + self._original_pset = original_pset + self._pmap_evolver = original_pset._map.evolver() + + def add(self, element): + self._pmap_evolver[element] = True + return self + + def remove(self, element): + del self._pmap_evolver[element] + return self + + def is_dirty(self): + return self._pmap_evolver.is_dirty() + + def persistent(self): + if not self.is_dirty(): + return self._original_pset + return PSet(self._pmap_evolver.persistent()) + + def __len__(self): + return len(self._pmap_evolver) + + def copy(self): + return self + + def evolver(self): + """ + Create a new evolver for this pset. For a discussion on evolvers in general see the + documentation for the pvector evolver. + + Create the evolver and perform various mutating updates to it: + + >>> s1 = s(1, 2, 3) + >>> e = s1.evolver() + >>> _ = e.add(4) + >>> len(e) + 4 + >>> _ = e.remove(1) + + The underlying pset remains the same: + + >>> s1 + pset([1, 2, 3]) + + The changes are kept in the evolver. An updated pmap can be created using the + persistent() function on the evolver. + + >>> s2 = e.persistent() + >>> s2 + pset([2, 3, 4]) + + The new pset will share data with the original pset in the same way that would have + been done if only using operations on the pset. 
+ """ + return PSet._Evolver(self) + + __le__ = Set.__le__ + __lt__ = Set.__lt__ + __gt__ = Set.__gt__ + __ge__ = Set.__ge__ + __eq__ = Set.__eq__ + __ne__ = Set.__ne__ + __and__ = Set.__and__ + __or__ = Set.__or__ + __sub__ = Set.__sub__ + __xor__ = Set.__xor__ + issubset = __le__ + issuperset = __ge__ + union = __or__ + intersection = __and__ + difference = __sub__ + symmetric_difference = __xor__ + isdisjoint = Set.isdisjoint + + +Set.register(PSet) +Hashable.register(PSet) +_EMPTY_PSET = PSet(pmap()) + +def pset(iterable=(), pre_size=8): + """ + Creates a persistent set from iterable. Optionally takes a sizing parameter equivalent to that + used for :py:func:`pmap`. + + >>> s1 = pset([1, 2, 3, 2]) + >>> s1 + pset([1, 2, 3]) + """ + if not iterable: + return _EMPTY_PSET + return PSet._from_iterable(iterable, pre_size=pre_size) + + +def s(*elements): + """ + Create a persistent set. + + Takes an arbitrary number of arguments to insert into the new set. + + >>> s1 = s(1, 2, 3, 2) + >>> s1 + pset([1, 2, 3]) + """ + return pset(elements) diff --git a/APPS_UNCOMPILED/lib/pyrsistent/_pvector.py b/APPS_UNCOMPILED/lib/pyrsistent/_pvector.py new file mode 100644 index 0000000..8002b48 --- /dev/null +++ b/APPS_UNCOMPILED/lib/pyrsistent/_pvector.py @@ -0,0 +1,649 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pyrsistent/_pvector.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 22715 bytes +from abc import abstractmethod, ABCMeta +from ._compat import Sequence, Hashable +from numbers import Integral +import operator, six +from pyrsistent._transformations import transform + +def _bitcount(val): + return bin(val).count("1") + + +BRANCH_FACTOR = 32 +BIT_MASK = BRANCH_FACTOR - 1 +SHIFT = _bitcount(BIT_MASK) + +def compare_pvector(v, other, operator): + return 
operator(v.tolist(), other.tolist() if isinstance(other, PVector) else other) + + +def _index_or_slice(index, stop): + if stop is None: + return index + return slice(index, stop) + + +class PythonPVector(object): + __doc__ = "\n Support structure for PVector that implements structural sharing for vectors using a trie.\n " + __slots__ = ('_count', '_shift', '_root', '_tail', '_tail_offset', '__weakref__') + + def __new__(cls, count, shift, root, tail): + self = super(PythonPVector, cls).__new__(cls) + self._count = count + self._shift = shift + self._root = root + self._tail = tail + self._tail_offset = self._count - len(self._tail) + return self + + def __len__(self): + return self._count + + def __getitem__(self, index): + if isinstance(index, slice): + if index.start is None: + if index.stop is None: + if index.step is None: + return self + return _EMPTY_PVECTOR.extend(self.tolist()[index]) + if index < 0: + index += self._count + return PythonPVector._node_for(self, index)[index & BIT_MASK] + + def __add__(self, other): + return self.extend(other) + + def __repr__(self): + return "pvector({0})".format(str(self.tolist())) + + def __str__(self): + return self.__repr__() + + def __iter__(self): + return iter(self.tolist()) + + def __ne__(self, other): + return not self.__eq__(other) + + def __eq__(self, other): + return self is other or hasattr(other, "__len__") and self._count == len(other) and compare_pvector(self, other, operator.eq) + + def __gt__(self, other): + return compare_pvector(self, other, operator.gt) + + def __lt__(self, other): + return compare_pvector(self, other, operator.lt) + + def __ge__(self, other): + return compare_pvector(self, other, operator.ge) + + def __le__(self, other): + return compare_pvector(self, other, operator.le) + + def __mul__(self, times): + if times <= 0 or self is _EMPTY_PVECTOR: + return _EMPTY_PVECTOR + if times == 1: + return self + return _EMPTY_PVECTOR.extend(times * self.tolist()) + + __rmul__ = __mul__ + + def 
_fill_list(self, node, shift, the_list): + if shift: + shift -= SHIFT + for n in node: + self._fill_list(n, shift, the_list) + + else: + the_list.extend(node) + + def tolist(self): + """ + The fastest way to convert the vector into a python list. + """ + the_list = [] + self._fill_list(self._root, self._shift, the_list) + the_list.extend(self._tail) + return the_list + + def _totuple(self): + """ + Returns the content as a python tuple. + """ + return tuple(self.tolist()) + + def __hash__(self): + return hash(self._totuple()) + + def transform(self, *transformations): + return transform(self, transformations) + + def __reduce__(self): + return ( + pvector, (self.tolist(),)) + + def mset(self, *args): + if len(args) % 2: + raise TypeError("mset expected an even number of arguments") + evolver = self.evolver() + for i in range(0, len(args), 2): + evolver[args[i]] = args[i + 1] + + return evolver.persistent() + + class Evolver(object): + __slots__ = ('_count', '_shift', '_root', '_tail', '_tail_offset', '_dirty_nodes', + '_extra_tail', '_cached_leafs', '_orig_pvector') + + def __init__(self, v): + self._reset(v) + + def __getitem__(self, index): + if not isinstance(index, Integral): + raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__) + elif index < 0: + index += self._count + len(self._extra_tail) + if self._count <= index < self._count + len(self._extra_tail): + return self._extra_tail[index - self._count] + return PythonPVector._node_for(self, index)[index & BIT_MASK] + + def _reset(self, v): + self._count = v._count + self._shift = v._shift + self._root = v._root + self._tail = v._tail + self._tail_offset = v._tail_offset + self._dirty_nodes = {} + self._cached_leafs = {} + self._extra_tail = [] + self._orig_pvector = v + + def append(self, element): + self._extra_tail.append(element) + return self + + def extend(self, iterable): + self._extra_tail.extend(iterable) + return self + + def set(self, index, val): + self[index] = 
val + return self + + def __setitem__(self, index, val): + if not isinstance(index, Integral): + raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__) + if index < 0: + index += self._count + len(self._extra_tail) + if 0 <= index < self._count: + node = self._cached_leafs.get(index >> SHIFT) + if node: + node[index & BIT_MASK] = val + else: + if index >= self._tail_offset: + if id(self._tail) not in self._dirty_nodes: + self._tail = list(self._tail) + self._dirty_nodes[id(self._tail)] = True + self._cached_leafs[index >> SHIFT] = self._tail + self._tail[index & BIT_MASK] = val + else: + self._root = self._do_set(self._shift, self._root, index, val) + else: + if self._count <= index < self._count + len(self._extra_tail): + self._extra_tail[index - self._count] = val + else: + if index == self._count + len(self._extra_tail): + self._extra_tail.append(val) + else: + raise IndexError("Index out of range: %s" % (index,)) + + def _do_set(self, level, node, i, val): + if id(node) in self._dirty_nodes: + ret = node + else: + ret = list(node) + self._dirty_nodes[id(ret)] = True + if level == 0: + ret[i & BIT_MASK] = val + self._cached_leafs[i >> SHIFT] = ret + else: + sub_index = i >> level & BIT_MASK + ret[sub_index] = self._do_set(level - SHIFT, node[sub_index], i, val) + return ret + + def delete(self, index): + del self[index] + return self + + def __delitem__(self, key): + if self._orig_pvector: + l = PythonPVector(self._count, self._shift, self._root, self._tail).tolist() + l.extend(self._extra_tail) + self._reset(_EMPTY_PVECTOR) + self._extra_tail = l + del self._extra_tail[key] + + def persistent(self): + result = self._orig_pvector + if self.is_dirty(): + result = PythonPVector(self._count, self._shift, self._root, self._tail).extend(self._extra_tail) + self._reset(result) + return result + + def __len__(self): + return self._count + len(self._extra_tail) + + def is_dirty(self): + return bool(self._dirty_nodes or self._extra_tail) 
+ + def evolver(self): + return PythonPVector.Evolver(self) + + def set(self, i, val): + if not isinstance(i, Integral): + raise TypeError("'%s' object cannot be interpreted as an index" % type(i).__name__) + elif i < 0: + i += self._count + if 0 <= i < self._count: + if i >= self._tail_offset: + new_tail = list(self._tail) + new_tail[i & BIT_MASK] = val + return PythonPVector(self._count, self._shift, self._root, new_tail) + return PythonPVector(self._count, self._shift, self._do_set(self._shift, self._root, i, val), self._tail) + if i == self._count: + return self.append(val) + raise IndexError("Index out of range: %s" % (i,)) + + def _do_set(self, level, node, i, val): + ret = list(node) + if level == 0: + ret[i & BIT_MASK] = val + else: + sub_index = i >> level & BIT_MASK + ret[sub_index] = self._do_set(level - SHIFT, node[sub_index], i, val) + return ret + + @staticmethod + def _node_for(pvector_like, i): + if 0 <= i < pvector_like._count: + if i >= pvector_like._tail_offset: + return pvector_like._tail + node = pvector_like._root + for level in range(pvector_like._shift, 0, -SHIFT): + node = node[i >> level & BIT_MASK] + + return node + raise IndexError("Index out of range: %s" % (i,)) + + def _create_new_root(self): + new_shift = self._shift + if self._count >> SHIFT > 1 << self._shift: + new_root = [ + self._root, self._new_path(self._shift, self._tail)] + new_shift += SHIFT + else: + new_root = self._push_tail(self._shift, self._root, self._tail) + return (new_root, new_shift) + + def append(self, val): + if len(self._tail) < BRANCH_FACTOR: + new_tail = list(self._tail) + new_tail.append(val) + return PythonPVector(self._count + 1, self._shift, self._root, new_tail) + new_root, new_shift = self._create_new_root() + return PythonPVector(self._count + 1, new_shift, new_root, [val]) + + def _new_path(self, level, node): + if level == 0: + return node + return [self._new_path(level - SHIFT, node)] + + def _mutating_insert_tail(self): + self._root, self._shift 
= self._create_new_root()
+        self._tail = []
+
+    def _mutating_fill_tail(self, offset, sequence):
+        max_delta_len = BRANCH_FACTOR - len(self._tail)
+        # NOTE(review): decompiler emitted sequence[offset[:offset + max_delta_len]],
+        # which slices the int `offset` (TypeError at runtime); restored the
+        # upstream slice of `sequence`.
+        delta = sequence[offset:offset + max_delta_len]
+        self._tail.extend(delta)
+        delta_len = len(delta)
+        self._count += delta_len
+        return offset + delta_len
+
+    def _mutating_extend(self, sequence):
+        offset = 0
+        sequence_len = len(sequence)
+        while offset < sequence_len:
+            offset = self._mutating_fill_tail(offset, sequence)
+            if len(self._tail) == BRANCH_FACTOR:
+                self._mutating_insert_tail()
+
+        self._tail_offset = self._count - len(self._tail)
+
+    def extend(self, obj):
+        l = obj.tolist() if isinstance(obj, PythonPVector) else list(obj)
+        if l:
+            new_vector = self.append(l[0])
+            # NOTE(review): decompiler emitted l[1[:None]]; restored l[1:].
+            new_vector._mutating_extend(l[1:])
+            return new_vector
+        return self
+
+    def _push_tail(self, level, parent, tail_node):
+        """
+        if parent is leaf, insert node,
+        else does it map to an existing child? ->
+             node_to_insert = push node one more level
+        else alloc new path
+
+        return node_to_insert placed in copy of parent
+        """
+        ret = list(parent)
+        if level == SHIFT:
+            ret.append(tail_node)
+            return ret
+        sub_index = self._count - 1 >> level & BIT_MASK
+        if len(parent) > sub_index:
+            ret[sub_index] = self._push_tail(level - SHIFT, parent[sub_index], tail_node)
+            return ret
+        ret.append(self._new_path(level - SHIFT, tail_node))
+        return ret
+
+    def index(self, value, *args, **kwargs):
+        return (self.tolist().index)(value, *args, **kwargs)
+
+    def count(self, value):
+        return self.tolist().count(value)
+
+    def delete(self, index, stop=None):
+        l = self.tolist()
+        del l[_index_or_slice(index, stop)]
+        return _EMPTY_PVECTOR.extend(l)
+
+    def remove(self, value):
+        l = self.tolist()
+        l.remove(value)
+        return _EMPTY_PVECTOR.extend(l)
+
+
+@six.add_metaclass(ABCMeta)
+class PVector(object):
+    __doc__ = "\n Persistent vector implementation. 
Meant as a replacement for the cases where you would normally\n use a Python list.\n\n Do not instantiate directly, instead use the factory functions :py:func:`v` and :py:func:`pvector` to\n create an instance.\n\n Heavily influenced by the persistent vector available in Clojure. Initially this was more or\n less just a port of the Java code for the Clojure vector. It has since been modified and to\n some extent optimized for usage in Python.\n\n The vector is organized as a trie, any mutating method will return a new vector that contains the changes. No\n updates are done to the original vector. Structural sharing between vectors are applied where possible to save\n space and to avoid making complete copies.\n\n This structure corresponds most closely to the built in list type and is intended as a replacement. Where the\n semantics are the same (more or less) the same function names have been used but for some cases it is not possible,\n for example assignments.\n\n The PVector implements the Sequence protocol and is Hashable.\n\n Inserts are amortized O(1). Random access is log32(n) where n is the size of the vector.\n\n The following are examples of some common operations on persistent vectors:\n\n >>> p = v(1, 2, 3)\n >>> p2 = p.append(4)\n >>> p3 = p2.extend([5, 6, 7])\n >>> p\n pvector([1, 2, 3])\n >>> p2\n pvector([1, 2, 3, 4])\n >>> p3\n pvector([1, 2, 3, 4, 5, 6, 7])\n >>> p3[5]\n 6\n >>> p.set(1, 99)\n pvector([1, 99, 3])\n >>>\n " + + @abstractmethod + def __len__(self): + """ + >>> len(v(1, 2, 3)) + 3 + """ + pass + + @abstractmethod + def __getitem__(self, index): + """ + Get value at index. Full slicing support. 
+ + >>> v1 = v(5, 6, 7, 8) + >>> v1[2] + 7 + >>> v1[1:3] + pvector([6, 7]) + """ + pass + + @abstractmethod + def __add__(self, other): + """ + >>> v1 = v(1, 2) + >>> v2 = v(3, 4) + >>> v1 + v2 + pvector([1, 2, 3, 4]) + """ + pass + + @abstractmethod + def __mul__(self, times): + """ + >>> v1 = v(1, 2) + >>> 3 * v1 + pvector([1, 2, 1, 2, 1, 2]) + """ + pass + + @abstractmethod + def __hash__(self): + """ + >>> v1 = v(1, 2, 3) + >>> v2 = v(1, 2, 3) + >>> hash(v1) == hash(v2) + True + """ + pass + + @abstractmethod + def evolver(self): + """ + Create a new evolver for this pvector. The evolver acts as a mutable view of the vector + with "transaction like" semantics. No part of the underlying vector i updated, it is still + fully immutable. Furthermore multiple evolvers created from the same pvector do not + interfere with each other. + + You may want to use an evolver instead of working directly with the pvector in the + following cases: + + * Multiple updates are done to the same vector and the intermediate results are of no + interest. In this case using an evolver may be a more efficient and easier to work with. + * You need to pass a vector into a legacy function or a function that you have no control + over which performs in place mutations of lists. In this case pass an evolver instance + instead and then create a new pvector from the evolver once the function returns. + + The following example illustrates a typical workflow when working with evolvers. It also + displays most of the API (which i kept small by design, you should not be tempted to + use evolvers in excess ;-)). + + Create the evolver and perform various mutating updates to it: + + >>> v1 = v(1, 2, 3, 4, 5) + >>> e = v1.evolver() + >>> e[1] = 22 + >>> _ = e.append(6) + >>> _ = e.extend([7, 8, 9]) + >>> e[8] += 1 + >>> len(e) + 9 + + The underlying pvector remains the same: + + >>> v1 + pvector([1, 2, 3, 4, 5]) + + The changes are kept in the evolver. 
An updated pvector can be created using the + persistent() function on the evolver. + + >>> v2 = e.persistent() + >>> v2 + pvector([1, 22, 3, 4, 5, 6, 7, 8, 10]) + + The new pvector will share data with the original pvector in the same way that would have + been done if only using operations on the pvector. + """ + pass + + @abstractmethod + def mset(self, *args): + """ + Return a new vector with elements in specified positions replaced by values (multi set). + + Elements on even positions in the argument list are interpreted as indexes while + elements on odd positions are considered values. + + >>> v1 = v(1, 2, 3) + >>> v1.mset(0, 11, 2, 33) + pvector([11, 2, 33]) + """ + pass + + @abstractmethod + def set(self, i, val): + """ + Return a new vector with element at position i replaced with val. The original vector remains unchanged. + + Setting a value one step beyond the end of the vector is equal to appending. Setting beyond that will + result in an IndexError. + + >>> v1 = v(1, 2, 3) + >>> v1.set(1, 4) + pvector([1, 4, 3]) + >>> v1.set(3, 4) + pvector([1, 2, 3, 4]) + >>> v1.set(-1, 4) + pvector([1, 2, 4]) + """ + pass + + @abstractmethod + def append(self, val): + """ + Return a new vector with val appended. + + >>> v1 = v(1, 2) + >>> v1.append(3) + pvector([1, 2, 3]) + """ + pass + + @abstractmethod + def extend(self, obj): + """ + Return a new vector with all values in obj appended to it. Obj may be another + PVector or any other Iterable. + + >>> v1 = v(1, 2, 3) + >>> v1.extend([4, 5]) + pvector([1, 2, 3, 4, 5]) + """ + pass + + @abstractmethod + def index(self, value, *args, **kwargs): + """ + Return first index of value. Additional indexes may be supplied to limit the search to a + sub range of the vector. + + >>> v1 = v(1, 2, 3, 4, 3) + >>> v1.index(3) + 2 + >>> v1.index(3, 3, 5) + 4 + """ + pass + + @abstractmethod + def count(self, value): + """ + Return the number of times that value appears in the vector. 
+ + >>> v1 = v(1, 4, 3, 4) + >>> v1.count(4) + 2 + """ + pass + + @abstractmethod + def transform(self, *transformations): + """ + Transform arbitrarily complex combinations of PVectors and PMaps. A transformation + consists of two parts. One match expression that specifies which elements to transform + and one transformation function that performs the actual transformation. + + >>> from pyrsistent import freeze, ny + >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'}, + ... {'author': 'Steve', 'content': 'A slightly longer article'}], + ... 'weather': {'temperature': '11C', 'wind': '5m/s'}}) + >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c) + >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c) + >>> very_short_news.articles[0].content + 'A short article' + >>> very_short_news.articles[1].content + 'A slightly long...' + + When nothing has been transformed the original data structure is kept + + >>> short_news is news_paper + True + >>> very_short_news is news_paper + False + >>> very_short_news.articles[0] is news_paper.articles[0] + True + """ + pass + + @abstractmethod + def delete(self, index, stop=None): + """ + Delete a portion of the vector by index or range. + + >>> v1 = v(1, 2, 3, 4, 5) + >>> v1.delete(1) + pvector([1, 3, 4, 5]) + >>> v1.delete(1, 3) + pvector([1, 4, 5]) + """ + pass + + @abstractmethod + def remove(self, value): + """ + Remove the first occurrence of a value from the vector. + + >>> v1 = v(1, 2, 3, 2, 1) + >>> v2 = v1.remove(1) + >>> v2 + pvector([2, 3, 2, 1]) + >>> v2.remove(1) + pvector([2, 3, 2]) + """ + pass + + +_EMPTY_PVECTOR = PythonPVector(0, SHIFT, [], []) +PVector.register(PythonPVector) +Sequence.register(PVector) +Hashable.register(PVector) + +def python_pvector(iterable=()): + """ + Create a new persistent vector containing the elements in iterable. 
+ + >>> v1 = pvector([1, 2, 3]) + >>> v1 + pvector([1, 2, 3]) + """ + return _EMPTY_PVECTOR.extend(iterable) + + +try: + import os + if os.environ.get("PYRSISTENT_NO_C_EXTENSION"): + pvector = python_pvector + else: + from pvectorc import pvector + PVector.register(type(pvector())) +except ImportError: + pvector = python_pvector + +def v(*elements): + """ + Create a new persistent vector containing all parameters to this function. + + >>> v1 = v(1, 2, 3) + >>> v1 + pvector([1, 2, 3]) + """ + return pvector(elements) diff --git a/APPS_UNCOMPILED/lib/pyrsistent/_toolz.py b/APPS_UNCOMPILED/lib/pyrsistent/_toolz.py new file mode 100644 index 0000000..8ac7892 --- /dev/null +++ b/APPS_UNCOMPILED/lib/pyrsistent/_toolz.py @@ -0,0 +1,88 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pyrsistent/_toolz.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 3427 bytes +""" +Functionality copied from the toolz package to avoid having +to add toolz as a dependency. + +See https://github.com/pytoolz/toolz/. + +toolz is relased under BSD licence. Below is the licence text +from toolz as it appeared when copying the code. + +-------------------------------------------------------------- + +Copyright (c) 2013 Matthew Rocklin + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + a. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + b. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + c. 
Neither the name of toolz nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +DAMAGE. +""" +import operator +from six.moves import reduce + +def get_in(keys, coll, default=None, no_default=False): + """ + NB: This is a straight copy of the get_in implementation found in + the toolz library (https://github.com/pytoolz/toolz/). It works + with persistent data structures as well as the corresponding + datastructures from the stdlib. + + Returns coll[i0][i1]...[iX] where [i0, i1, ..., iX]==keys. + + If coll[i0][i1]...[iX] cannot be found, returns ``default``, unless + ``no_default`` is specified, then it raises KeyError or IndexError. + + ``get_in`` is a generalization of ``operator.getitem`` for nested data + structures such as dictionaries and lists. + >>> from pyrsistent import freeze + >>> transaction = freeze({'name': 'Alice', + ... 'purchase': {'items': ['Apple', 'Orange'], + ... 'costs': [0.50, 1.25]}, + ... 
'credit card': '5555-1234-1234-1234'}) + >>> get_in(['purchase', 'items', 0], transaction) + 'Apple' + >>> get_in(['name'], transaction) + 'Alice' + >>> get_in(['purchase', 'total'], transaction) + >>> get_in(['purchase', 'items', 'apple'], transaction) + >>> get_in(['purchase', 'items', 10], transaction) + >>> get_in(['purchase', 'total'], transaction, 0) + 0 + >>> get_in(['y'], {}, no_default=True) + Traceback (most recent call last): + ... + KeyError: 'y' + """ + try: + return reduce(operator.getitem, keys, coll) + except (KeyError, IndexError, TypeError): + if no_default: + raise + return default diff --git a/APPS_UNCOMPILED/lib/pyrsistent/_transformations.py b/APPS_UNCOMPILED/lib/pyrsistent/_transformations.py new file mode 100644 index 0000000..b8273c6 --- /dev/null +++ b/APPS_UNCOMPILED/lib/pyrsistent/_transformations.py @@ -0,0 +1,130 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pyrsistent/_transformations.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 3910 bytes +import re, six +try: + from inspect import Parameter, signature +except ImportError: + signature = None + try: + from inspect import getfullargspec as getargspec + except ImportError: + from inspect import getargspec + +_EMPTY_SENTINEL = object() + +def inc(x): + """ Add one to the current value """ + return x + 1 + + +def dec(x): + """ Subtract one from the current value """ + return x - 1 + + +def discard(evolver, key): + """ Discard the element and returns a structure without the discarded elements """ + try: + del evolver[key] + except KeyError: + pass + + +def rex(expr): + """ Regular expression matcher to use together with transform functions """ + r = re.compile(expr) + return (lambda key: isinstance(key, six.string_types) and r.match(key)) + + +def ny(_): + """ Matcher that 
matches any value """
+    return True
+
+
+def _chunks(l, n):
+    for i in range(0, len(l), n):
+        # NOTE(review): decompiler emitted l[i[:i + n]]; restored l[i:i + n].
+        yield l[i:i + n]
+
+
+def transform(structure, transformations):
+    r = structure
+    for path, command in _chunks(transformations, 2):
+        r = _do_to_path(r, path, command)
+
+    return r
+
+
+def _do_to_path(structure, path, command):
+    if not path:
+        if callable(command):
+            return command(structure)
+        return command
+    kvs = _get_keys_and_values(structure, path[0])
+    # NOTE(review): decompiler emitted path[1[:None]]; restored path[1:].
+    return _update_structure(structure, kvs, path[1:], command)
+
+
+def _items(structure):
+    try:
+        return structure.items()
+    except AttributeError:
+        return list(enumerate(structure))
+
+
+def _get(structure, key, default):
+    try:
+        if hasattr(structure, "__getitem__"):
+            return structure[key]
+        return getattr(structure, key)
+    except (IndexError, KeyError):
+        return default
+
+
+def _get_keys_and_values(structure, key_spec):
+    if callable(key_spec):
+        arity = _get_arity(key_spec)
+        if arity == 1:
+            return [(k, v) for k, v in _items(structure) if key_spec(k)]
+        if arity == 2:
+            return [(k, v) for k, v in _items(structure) if key_spec(k, v)]
+        raise ValueError("callable in transform path must take 1 or 2 arguments")
+    return [
+        (
+            key_spec, _get(structure, key_spec, _EMPTY_SENTINEL))]
+
+
+if signature is None:
+
+    def _get_arity(f):
+        argspec = getargspec(f)
+        return len(argspec.args) - len(argspec.defaults or ())
+
+
+else:
+
+    def _get_arity(f):
+        return sum((1 for p in signature(f).parameters.values() if p.default is Parameter.empty if p.kind in (Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD)))
+
+
+def _update_structure(structure, kvs, path, command):
+    from pyrsistent._pmap import pmap
+    e = structure.evolver()
+    # NOTE(review): decompiler emitted "(path or command) is discard"; restored
+    # the upstream condition "not path and command is discard" (discard only
+    # applies when the path has been fully consumed).
+    if not path and command is discard:
+        for k, v in reversed(kvs):
+            discard(e, k)
+
+    else:
+        for k, v in kvs:
+            is_empty = False
+            if v is _EMPTY_SENTINEL:
+                is_empty = True
+                v = pmap()
+            result = _do_to_path(v, path, command)
+            if result is not v or is_empty:
+                e[k] = result
+
+    
return e.persistent() diff --git a/APPS_UNCOMPILED/lib/pyrsistent/typing.py b/APPS_UNCOMPILED/lib/pyrsistent/typing.py new file mode 100644 index 0000000..996c199 --- /dev/null +++ b/APPS_UNCOMPILED/lib/pyrsistent/typing.py @@ -0,0 +1,94 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/pyrsistent/typing.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 1767 bytes +"""Helpers for use with type annotation. + +Use the empty classes in this module when annotating the types of Pyrsistent +objects, instead of using the actual collection class. + +For example, + + from pyrsistent import pvector + from pyrsistent.typing import PVector + + myvector: PVector[str] = pvector(['a', 'b', 'c']) + +""" +from __future__ import absolute_import +try: + from typing import Container + from typing import Hashable + from typing import Generic + from typing import Iterable + from typing import Mapping + from typing import Sequence + from typing import Sized + from typing import TypeVar + __all__ = [ + 'CheckedPMap', + 'CheckedPSet', + 'CheckedPVector', + 'PBag', + 'PDeque', + 'PList', + 'PMap', + 'PSet', + 'PVector'] + T = TypeVar("T") + KT = TypeVar("KT") + VT = TypeVar("VT") + + class CheckedPMap(Mapping[(KT, VT)], Hashable): + pass + + + class CheckedPSet(Generic[T], Hashable): + pass + + + class CheckedPVector(Sequence[T], Hashable): + pass + + + class PBag(Container[T], Iterable[T], Sized, Hashable): + pass + + + class PDeque(Sequence[T], Hashable): + pass + + + class PList(Sequence[T], Hashable): + pass + + + class PMap(Mapping[(KT, VT)], Hashable): + pass + + + class PSet(Generic[T], Hashable): + pass + + + class PVector(Sequence[T], Hashable): + pass + + + class PVectorEvolver(Generic[T]): + pass + + + class PMapEvolver(Generic[(KT, VT)]): + pass + + + class 
PSetEvolver(Generic[T]): + pass + + +except ImportError: + pass diff --git a/APPS_UNCOMPILED/lib/serpent.py b/APPS_UNCOMPILED/lib/serpent.py new file mode 100644 index 0000000..099078f --- /dev/null +++ b/APPS_UNCOMPILED/lib/serpent.py @@ -0,0 +1,506 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/serpent.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 20146 bytes +""" +ast.literal_eval() compatible object tree serialization. + +Serpent serializes an object tree into bytes (utf-8 encoded string) that can +be decoded and then passed as-is to ast.literal_eval() to rebuild it as the +original object tree. As such it is safe to send serpent data to other +machines over the network for instance (because only 'safe' literals are +encoded). + +Compatible with recent Python 3 versions + +Serpent handles several special Python types to make life easier: + + - bytes, bytearrays, memoryview --> string, base-64 + (you'll have to manually un-base64 them though) + - uuid.UUID, datetime.{datetime, date, time, timespan} --> appropriate string/number + - decimal.Decimal --> string (to not lose precision) + - array.array typecode 'u' --> string + - array.array other typecode --> list + - Exception --> dict with some fields of the exception (message, args) + - collections module types --> mostly equivalent primitive types or dict + - enums --> the value of the enum + - all other types --> dict with __getstate__ or vars() of the object + +Notes: + +The serializer is not thread-safe. Make sure you're not making changes +to the object tree that is being serialized, and don't use the same +serializer in different threads. + +Because the serialized format is just valid Python source code, it can +contain comments. 
+ +Floats +inf and -inf are handled via a trick, Float 'nan' cannot be handled +and is represented by the special value: {'__class__':'float','value':'nan'} +We chose not to encode it as just the string 'NaN' because that could cause +memory issues when used in multiplications. + +Copyright by Irmen de Jong (irmen@razorvine.net) +Software license: "MIT software license". See http://opensource.org/licenses/MIT +""" +import ast, base64, sys, gc, decimal, datetime, uuid, array, math, numbers, codecs, collections, enum +from collections.abc import KeysView, ValuesView, ItemsView +__version__ = "1.41" +__all__ = ['dump', 'dumps', 'load', 'loads', 'register_class', 'unregister_class', 'tobytes'] + +def dumps(obj, indent=False, module_in_classname=False, bytes_repr=False): + """ + Serialize object tree to bytes. + indent = indent the output over multiple lines (default=false) + module_in_classname = include module prefix for class names or only use the class name itself + bytes_repr = should the bytes literal value representation be used instead of base-64 encoding for bytes types? + """ + return Serializer(indent, module_in_classname, bytes_repr).serialize(obj) + + +def dump(obj, file, indent=False, module_in_classname=False, bytes_repr=False): + """ + Serialize object tree to a file. + indent = indent the output over multiple lines (default=false) + module_in_classname = include module prefix for class names or only use the class name itself + bytes_repr = should the bytes literal value representation be used instead of base-64 encoding for bytes types? + """ + file.write(dumps(obj, indent=indent, module_in_classname=module_in_classname, bytes_repr=bytes_repr)) + + +def loads(serialized_bytes): + """Deserialize bytes back to object tree. Uses ast.literal_eval (safe).""" + serialized = codecs.decode(serialized_bytes, "utf-8") + if "\x00" in serialized: + raise ValueError("The serpent data contains 0-bytes so it cannot be parsed by ast.literal_eval. 
Has it been corrupted?") + try: + gc.disable() + return ast.literal_eval(serialized) + finally: + gc.enable() + + +def load(file): + """Deserialize bytes from a file back to object tree. Uses ast.literal_eval (safe).""" + data = file.read() + return loads(data) + + +def _ser_OrderedDict(obj, serializer, outputstream, indentlevel): + obj = {'__class__':"collections.OrderedDict" if (serializer.module_in_classname) else "OrderedDict", + 'items':list(obj.items())} + serializer._serialize(obj, outputstream, indentlevel) + + +def _ser_DictView(obj, serializer, outputstream, indentlevel): + serializer.ser_builtins_list(obj, outputstream, indentlevel) + + +_special_classes_registry = collections.OrderedDict() + +def _reset_special_classes_registry(): + _special_classes_registry.clear() + _special_classes_registry[KeysView] = _ser_DictView + _special_classes_registry[ValuesView] = _ser_DictView + _special_classes_registry[ItemsView] = _ser_DictView + _special_classes_registry[collections.OrderedDict] = _ser_OrderedDict + + def _ser_Enum(obj, serializer, outputstream, indentlevel): + serializer._serialize(obj.value, outputstream, indentlevel) + + _special_classes_registry[enum.Enum] = _ser_Enum + + +_reset_special_classes_registry() + +def unregister_class(clazz): + """Unregister the specialcase serializer for the given class.""" + if clazz in _special_classes_registry: + del _special_classes_registry[clazz] + + +def register_class(clazz, serializer): + """ + Register a special serializer function for objects of the given class. + The function will be called with (object, serpent_serializer, outputstream, indentlevel) arguments. + The function must write the serialized data to outputstream. It doesn't return a value. 
+ """ + _special_classes_registry[clazz] = serializer + + +_repr_types = { + str, int, bool, type(None)} +_translate_types = {(collections.deque): list, + (collections.UserDict): dict, + (collections.UserList): list, + (collections.UserString): str} +_bytes_types = ( + bytes, bytearray, memoryview) + +def _translate_byte_type(t, data, bytes_repr): + if bytes_repr: + if t == bytes: + return repr(data) + if t == bytearray: + return repr(bytes(data)) + if t == memoryview: + return repr(bytes(data)) + raise TypeError("invalid bytes type") + else: + b64 = base64.b64encode(data) + return repr({'data':b64 if (type(b64) is str) else (b64.decode("ascii")), + 'encoding':"base64"}) + + +def tobytes(obj): + """ + Utility function to convert obj back to actual bytes if it is a serpent-encoded bytes dictionary + (a dict with base-64 encoded 'data' in it and 'encoding'='base64'). + If obj is already bytes or a byte-like type, return obj unmodified. + Will raise TypeError if obj is none of the above. + + All this is not required if you called serpent with 'bytes_repr' set to True, since Serpent 1.40 + that can be used to directly encode bytes into the bytes literal value representation. + That will be less efficient than the default base-64 encoding though, but it's a bit more convenient. 
+ """ + if isinstance(obj, _bytes_types): + return obj + if isinstance(obj, dict): + if "data" in obj: + if obj.get("encoding") == "base64": + try: + return base64.b64decode(obj["data"]) + except TypeError: + return base64.b64decode(obj["data"].encode("ascii")) + + raise TypeError("argument is neither bytes nor serpent base64 encoded bytes dict") + + +class Serializer(object): + __doc__ = "\n Serialize an object tree to a byte stream.\n It is not thread-safe: make sure you're not making changes to the\n object tree that is being serialized, and don't use the same serializer\n across different threads.\n " + dispatch = {} + + def __init__(self, indent=False, module_in_classname=False, bytes_repr=False): + """ + Initialize the serializer. + indent=indent the output over multiple lines (default=false) + module_in_classname = include module prefix for class names or only use the class name itself + bytes_repr = should the bytes literal value representation be used instead of base-64 encoding for bytes types? + """ + self.indent = indent + self.module_in_classname = module_in_classname + self.serialized_obj_ids = set() + self.special_classes_registry_copy = None + self.maximum_level = min(sys.getrecursionlimit() // 5, 1000) + self.bytes_repr = bytes_repr + + def serialize(self, obj): + """Serialize the object tree to bytes.""" + self.special_classes_registry_copy = _special_classes_registry.copy() + header = "# serpent utf-8 python3.2\n" + out = [header] + try: + gc.disable() + self.serialized_obj_ids = set() + self._serialize(obj, out, 0) + finally: + gc.enable() + + self.special_classes_registry_copy = None + del self.serialized_obj_ids + return "".join(out).encode("utf-8") + + _shortcut_dispatch_types = { + float, complex, tuple, list, dict, set, frozenset} + + def _serialize(self, obj, out, level): + if level > self.maximum_level: + raise ValueError("Object graph nesting too deep. 
Increase serializer.maximum_level if you think you need more, but this may cause a RecursionError instead if Python's recursion limit doesn't allow it.") + else: + t = type(obj) + if t in _bytes_types: + out.append(_translate_byte_type(t, obj, self.bytes_repr)) + return + if t in _translate_types: + obj = _translate_types[t](obj) + t = type(obj) + if t in _repr_types: + out.append(repr(obj)) + return + if t in self._shortcut_dispatch_types: + return self.dispatch[t](self, obj, out, level) + special_classes = self.special_classes_registry_copy + for clazz in special_classes: + if isinstance(obj, clazz): + special_classes[clazz](obj, self, out, level) + return + + try: + func = self.dispatch[t] + except KeyError: + for type_ in t.__mro__: + if type_ in self.dispatch: + func = self.dispatch[type_] + break + else: + func = Serializer.ser_default_class + + func(self, obj, out, level) + + def ser_builtins_float(self, float_obj, out, level): + if math.isnan(float_obj): + out.append("{'__class__':'float','value':'nan'}") + else: + if math.isinf(float_obj): + if float_obj > 0: + out.append("1e30000") + else: + out.append("-1e30000") + else: + out.append(repr(float_obj)) + + dispatch[float] = ser_builtins_float + + def ser_builtins_complex(self, complex_obj, out, level): + out.append("(") + self.ser_builtins_float(complex_obj.real, out, level) + if complex_obj.imag >= 0: + out.append("+") + self.ser_builtins_float(complex_obj.imag, out, level) + out.append("j)") + + dispatch[complex] = ser_builtins_complex + + def ser_builtins_tuple(self, tuple_obj, out, level): + append = out.append + serialize = self._serialize + if self.indent and tuple_obj: + indent_chars = " " * level + indent_chars_inside = indent_chars + " " + append("(\n") + for elt in tuple_obj: + append(indent_chars_inside) + serialize(elt, out, level + 1) + append(",\n") + + out[-1] = out[-1].rstrip() + if len(tuple_obj) > 1: + del out[-1] + append("\n" + indent_chars + ")") + else: + append("(") + for elt in 
tuple_obj: + serialize(elt, out, level + 1) + append(",") + + if len(tuple_obj) > 1: + del out[-1] + append(")") + + dispatch[tuple] = ser_builtins_tuple + + def ser_builtins_list(self, list_obj, out, level): + if id(list_obj) in self.serialized_obj_ids: + raise ValueError("Circular reference detected (list)") + else: + self.serialized_obj_ids.add(id(list_obj)) + append = out.append + serialize = self._serialize + if self.indent and list_obj: + indent_chars = " " * level + indent_chars_inside = indent_chars + " " + append("[\n") + for elt in list_obj: + append(indent_chars_inside) + serialize(elt, out, level + 1) + append(",\n") + + del out[-1] + append("\n" + indent_chars + "]") + else: + append("[") + for elt in list_obj: + serialize(elt, out, level + 1) + append(",") + + if list_obj: + del out[-1] + append("]") + self.serialized_obj_ids.discard(id(list_obj)) + + dispatch[list] = ser_builtins_list + + def _check_hashable_type(self, t): + if t not in (bool, bytes, str, tuple): + if not issubclass(t, numbers.Number): + if issubclass(t, enum.Enum): + return + raise TypeError("one of the keys in a dict or set is not of a primitive hashable type: " + str(t) + ". 
Use simple types as keys or use a list or tuple as container.") + + def ser_builtins_dict(self, dict_obj, out, level): + if id(dict_obj) in self.serialized_obj_ids: + raise ValueError("Circular reference detected (dict)") + else: + self.serialized_obj_ids.add(id(dict_obj)) + append = out.append + serialize = self._serialize + if self.indent and dict_obj: + indent_chars = " " * level + indent_chars_inside = indent_chars + " " + append("{\n") + dict_items = dict_obj.items() + try: + sorted_items = sorted(dict_items) + except TypeError: + sorted_items = dict_items + + for key, value in sorted_items: + append(indent_chars_inside) + self._check_hashable_type(type(key)) + serialize(key, out, level + 1) + append(": ") + serialize(value, out, level + 1) + append(",\n") + + del out[-1] + append("\n" + indent_chars + "}") + else: + append("{") + for key, value in dict_obj.items(): + self._check_hashable_type(type(key)) + serialize(key, out, level + 1) + append(":") + serialize(value, out, level + 1) + append(",") + + if dict_obj: + del out[-1] + append("}") + self.serialized_obj_ids.discard(id(dict_obj)) + + dispatch[dict] = ser_builtins_dict + + def ser_builtins_set(self, set_obj, out, level): + append = out.append + serialize = self._serialize + if self.indent and set_obj: + indent_chars = " " * level + indent_chars_inside = indent_chars + " " + append("{\n") + try: + sorted_elts = sorted(set_obj) + except TypeError: + sorted_elts = set_obj + + for elt in sorted_elts: + append(indent_chars_inside) + self._check_hashable_type(type(elt)) + serialize(elt, out, level + 1) + append(",\n") + + del out[-1] + append("\n" + indent_chars + "}") + else: + if set_obj: + append("{") + for elt in set_obj: + self._check_hashable_type(type(elt)) + serialize(elt, out, level + 1) + append(",") + + del out[-1] + append("}") + else: + self.ser_builtins_tuple((), out, level) + + dispatch[set] = ser_builtins_set + + def ser_builtins_frozenset(self, set_obj, out, level): + 
self.ser_builtins_set(set_obj, out, level) + + dispatch[frozenset] = ser_builtins_set + + def ser_decimal_Decimal(self, decimal_obj, out, level): + out.append(repr(str(decimal_obj))) + + dispatch[decimal.Decimal] = ser_decimal_Decimal + + def ser_datetime_datetime(self, datetime_obj, out, level): + out.append(repr(datetime_obj.isoformat())) + + dispatch[datetime.datetime] = ser_datetime_datetime + + def ser_datetime_date(self, date_obj, out, level): + out.append(repr(date_obj.isoformat())) + + dispatch[datetime.date] = ser_datetime_date + + def ser_datetime_timedelta(self, timedelta_obj, out, level): + secs = timedelta_obj.total_seconds() + out.append(repr(secs)) + + dispatch[datetime.timedelta] = ser_datetime_timedelta + + def ser_datetime_time(self, time_obj, out, level): + out.append(repr(str(time_obj))) + + dispatch[datetime.time] = ser_datetime_time + + def ser_uuid_UUID(self, uuid_obj, out, level): + out.append(repr(str(uuid_obj))) + + dispatch[uuid.UUID] = ser_uuid_UUID + + def ser_exception_class(self, exc_obj, out, level): + value = {'__class__':(self.get_class_name)(exc_obj), + '__exception__':True, + 'args':exc_obj.args, + 'attributes':vars(exc_obj)} + self._serialize(value, out, level) + + dispatch[BaseException] = ser_exception_class + + def ser_array_array(self, array_obj, out, level): + if array_obj.typecode == "u": + self._serialize(array_obj.tounicode(), out, level) + else: + self._serialize(array_obj.tolist(), out, level) + + dispatch[array.array] = ser_array_array + + def ser_default_class(self, obj, out, level): + if id(obj) in self.serialized_obj_ids: + raise ValueError("Circular reference detected (class)") + self.serialized_obj_ids.add(id(obj)) + try: + has_own_getstate = hasattr(type(obj), "__getstate__") and type(obj).__getstate__ is not getattr(object, "__getstate__", None) + if has_own_getstate: + value = obj.__getstate__() + if isinstance(value, dict): + self.ser_builtins_dict(value, out, level) + return + else: + try: + value = 
dict(vars(obj)) + value["__class__"] = self.get_class_name(obj) + except TypeError: + if hasattr(obj, "__slots__"): + value = {} + for slot in obj.__slots__: + value[slot] = getattr(obj, slot) + + value["__class__"] = self.get_class_name(obj) + else: + raise TypeError("don't know how to serialize class " + str(obj.__class__) + ". Give it vars() or an appropriate __getstate__") + + self._serialize(value, out, level) + finally: + self.serialized_obj_ids.discard(id(obj)) + + def get_class_name(self, obj): + if self.module_in_classname: + return "%s.%s" % (obj.__class__.__module__, obj.__class__.__name__) + return obj.__class__.__name__ diff --git a/APPS_UNCOMPILED/lib/sftpFunc.py b/APPS_UNCOMPILED/lib/sftpFunc.py new file mode 100644 index 0000000..974e712 --- /dev/null +++ b/APPS_UNCOMPILED/lib/sftpFunc.py @@ -0,0 +1,105 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/sftpFunc.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 3267 bytes +import ctypes +from common.Logger import logger +sftpFunc = ctypes.cdll.LoadLibrary("libsftpFunc.so") + +def split_string_into_list(s): + return s.split("\n") + + +def print_log(result): + if result == 0: + logger.debug("Success") + else: + if result == 1000: + logger.error("get connect info error") + else: + if result == 1001: + logger.error("libssh2 init failed") + else: + if result == 1002: + logger.error("socket failed") + else: + if result == 1003: + logger.error("connect sftp server failed") + else: + if result == 1004: + logger.error("libssh2 session init failed") + else: + if result == 1005: + logger.error("libssh2 sftp init failed") + else: + if result == 1006: + logger.error("Unable to open remote file with SFTP") + else: + if result == 1007: + logger.error("Unable to open local file") + else: + if result == 1008: + 
logger.error("upload_file bytes_written != bytes_read") + else: + if result == 1009: + logger.error("Unable to delete file with SFTP") + else: + logger.error("Failure establishing SSH session: %d" % result) + + +def set_sftpConnect_info(AuthType, hostname, port=22, name=None, passwd='123456'): + sftpFunc.set_connect_info.argtypes = [ + ctypes.c_int, ctypes.c_char_p, ctypes.c_int, ctypes.c_char_p, ctypes.c_char_p] + sftpFunc.set_connect_info.restype = ctypes.c_int + result = sftpFunc.set_connect_info(AuthType, hostname.encode("utf-8"), port, name.encode("utf-8"), passwd.encode("utf-8")) + print_log(result) + return result + + +def sftp_upload_file(remotename, localname): + sftpFunc.upload_file.argtypes = [ + ctypes.c_char_p, ctypes.c_char_p] + sftpFunc.upload_file.restype = ctypes.c_int + result = sftpFunc.upload_file(remotename.encode("utf-8"), localname.encode("utf-8")) + print_log(result) + return result + + +def sftp_download_file(remotename, localname): + sftpFunc.download_file.argtypes = [ + ctypes.c_char_p, ctypes.c_char_p] + sftpFunc.download_file.restype = ctypes.c_int + result = sftpFunc.download_file(remotename.encode("utf-8"), localname.encode("utf-8")) + print_log(result) + return result + + +def sftp_remove_file(remotename): + sftpFunc.remove_file.argtypes = [ + ctypes.c_char_p] + sftpFunc.remove_file.restype = ctypes.c_int + result = sftpFunc.remove_file(remotename.encode("utf-8")) + print_log(result) + return result + + +def sftp_scan_directory(dirpath): + buffer_size = 1048576 + buffer = ctypes.create_string_buffer(buffer_size) + err_result = ['libssh2 init failed', 'socket failed', 'connect sftp server failed', 'libssh2 session init failed', + 'Failure establishing SSH session:', + 'libssh2 sftp init failed', 'Unable to open dir with SFTP'] + sftpFunc.scan_directory.argtypes = [ + ctypes.c_char_p, ctypes.c_char_p, ctypes.c_int] + sftpFunc.scan_directory.restype = ctypes.c_char_p + result = sftpFunc.scan_directory(dirpath.encode("utf-8"), buffer, 
buffer_size) + if any((error in result.decode() for error in err_result)): + logger.error(result.decode()) + else: + dir_list = [x.decode() for x in result.split(b'\n') if x.decode() not in ('.', + '..') if len(x.decode()) > 0] + logger.debug(dir_list) + return dir_list diff --git a/APPS_UNCOMPILED/lib/six.py b/APPS_UNCOMPILED/lib/six.py new file mode 100644 index 0000000..f9cd302 --- /dev/null +++ b/APPS_UNCOMPILED/lib/six.py @@ -0,0 +1,914 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/six.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 34159 bytes +"""Utilities for writing code that runs on Python 2 and 3""" +from __future__ import absolute_import +import functools, itertools, operator, sys, types +__author__ = "Benjamin Peterson " +__version__ = "1.15.0" +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0[:2]] >= (3, 4) +if PY3: + string_types = ( + str,) + integer_types = (int,) + class_types = (type,) + text_type = str + binary_type = bytes + MAXSIZE = sys.maxsize +else: + string_types = ( + basestring,) + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + if sys.platform.startswith("java"): + MAXSIZE = int(2147483647) + else: + + class X(object): + + def __len__(self): + return 2147483648L + + + try: + len(X()) + except OverflowError: + MAXSIZE = int(2147483647) + else: + MAXSIZE = int(9223372036854775807L) + del X + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, 
tp): + result = self._resolve() + setattr(obj, self.name, result) + try: + delattr(obj.__class__, self.name) + except AttributeError: + pass + + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = [ + "__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + else: + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + __doc__ = "\n A meta path importer to import six.moves and its submodules.\n\n This class implements a PEP302 finder and loader. It should be compatible\n with Python 2.5 and all existing versions of Python3\n " + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." 
+ fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + return sys.modules[fullname] + except KeyError: + pass + + mod = self._SixMetaPathImporter__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. + + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self._SixMetaPathImporter__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self._SixMetaPathImporter__get_module(fullname) + + get_source = get_code + + +_importer = _SixMetaPathImporter(__name__) + +class _MovedItems(_LazyModule): + __doc__ = "Lazy loading of moved objects" + __path__ = [] + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("getoutput", "commands", "subprocess"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + 
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, + 3) else "collections"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, + 9) else "_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + 
MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server")] +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg")] +else: + for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." 
+ attr.name) + + del attr + _MovedItems._moved_attributes = _moved_attributes + moves = _MovedItems(__name__ + ".moves") + _importer._add_module(moves, "moves") + + class Module_six_moves_urllib_parse(_LazyModule): + __doc__ = "Lazy loading of moved objects in six.moves.urllib_parse" + + + _urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("splitvalue", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse")] + for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) + + del attr + Module_six_moves_urllib_parse._moved_attributes 
= _urllib_parse_moved_attributes + _importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), "moves.urllib_parse", "moves.urllib.parse") + + class Module_six_moves_urllib_error(_LazyModule): + __doc__ = "Lazy loading of moved objects in six.moves.urllib_error" + + + _urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error")] + for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) + + del attr + Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + _importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), "moves.urllib_error", "moves.urllib.error") + + class Module_six_moves_urllib_request(_LazyModule): + __doc__ = "Lazy loading of moved objects in six.moves.urllib_request" + + + _urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + 
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), + MovedAttribute("parse_http_list", "urllib2", "urllib.request"), + MovedAttribute("parse_keqv_list", "urllib2", "urllib.request")] + for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) + + del attr + Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + _importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), "moves.urllib_request", "moves.urllib.request") + + class Module_six_moves_urllib_response(_LazyModule): + __doc__ = "Lazy loading of moved objects in six.moves.urllib_response" + + + _urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", 
"urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response")] + for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) + + del attr + Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + _importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), "moves.urllib_response", "moves.urllib.response") + + class Module_six_moves_urllib_robotparser(_LazyModule): + __doc__ = "Lazy loading of moved objects in six.moves.urllib_robotparser" + + + _urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser")] + for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) + + del attr + Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + _importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), "moves.urllib_robotparser", "moves.urllib.robotparser") + + class Module_six_moves_urllib(types.ModuleType): + __doc__ = "Create a six.moves.urllib namespace that resembles the Python 3 namespace" + __path__ = [] + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return [ + 'parse', 'error', 'request', 'response', + 'robotparser'] + + + _importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), "moves.urllib") + + def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + + def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except 
KeyError: + raise AttributeError("no such move, %r" % (name,)) + + + if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" + else: + _meth_func = "im_func" + _meth_self = "im_self" + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + try: + advance_iterator = next + except NameError: + + def advance_iterator(it): + return it.next() + + + next = advance_iterator + try: + callable = callable + except NameError: + + def callable(obj): + return any(("__call__" in klass.__dict__ for klass in type(obj).__mro__)) + + + if PY3: + + def get_unbound_function(unbound): + return unbound + + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + + Iterator = object + else: + + def get_unbound_function(unbound): + return unbound.im_func + + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + + callable = callable + _add_doc(get_unbound_function, "Get the function out of a possibly unbound function") + get_method_function = operator.attrgetter(_meth_func) + get_method_self = operator.attrgetter(_meth_self) + get_function_closure = operator.attrgetter(_func_closure) + get_function_code = operator.attrgetter(_func_code) + get_function_defaults = operator.attrgetter(_func_defaults) + get_function_globals = operator.attrgetter(_func_globals) + if PY3: + + def iterkeys(d, **kw): + return iter((d.keys)(**kw)) + + + def itervalues(d, **kw): + return iter((d.values)(**kw)) + + + def iteritems(d, **kw): + return iter((d.items)(**kw)) + + + def iterlists(d, **kw): + return iter((d.lists)(**kw)) + + + viewkeys = 
operator.methodcaller("keys") + viewvalues = operator.methodcaller("values") + viewitems = operator.methodcaller("items") + else: + + def iterkeys(d, **kw): + return (d.iterkeys)(**kw) + + + def itervalues(d, **kw): + return (d.itervalues)(**kw) + + + def iteritems(d, **kw): + return (d.iteritems)(**kw) + + + def iterlists(d, **kw): + return (d.iterlists)(**kw) + + + viewkeys = operator.methodcaller("viewkeys") + viewvalues = operator.methodcaller("viewvalues") + viewitems = operator.methodcaller("viewitems") + _add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") + _add_doc(itervalues, "Return an iterator over the values of a dictionary.") + _add_doc(iteritems, "Return an iterator over the (key, value) pairs of a dictionary.") + _add_doc(iterlists, "Return an iterator over the (key, [values]) pairs of a dictionary.") + if PY3: + + def b(s): + return s.encode("latin-1") + + + def u(s): + return s + + + unichr = chr + import struct + int2byte = struct.Struct(">B").pack + del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + del io + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" + _assertNotRegex = "assertNotRegex" + else: + + def b(s): + return s + + + def u(s): + return unicode(s.replace("\\\\", "\\\\\\\\"), "unicode_escape") + + + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + + def indexbytes(buf, i): + return ord(buf[i]) + + + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + _assertNotRegex = 
"assertNotRegexpMatches" + _add_doc(b, "Byte literal") + _add_doc(u, "Text literal") + + def assertCountEqual(self, *args, **kwargs): + return (getattr(self, _assertCountEqual))(*args, **kwargs) + + + def assertRaisesRegex(self, *args, **kwargs): + return (getattr(self, _assertRaisesRegex))(*args, **kwargs) + + + def assertRegex(self, *args, **kwargs): + return (getattr(self, _assertRegex))(*args, **kwargs) + + + def assertNotRegex(self, *args, **kwargs): + return (getattr(self, _assertNotRegex))(*args, **kwargs) + + + if PY3: + exec_ = getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + try: + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + finally: + value = None + tb = None + + + else: + + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + else: + if _locs_ is None: + _locs_ = _globs_ + exec("exec _code_ in _globs_, _locs_") + + + exec_("def reraise(tp, value, tb=None):\n try:\n raise tp, value, tb\n finally:\n tb = None\n") + if sys.version_info[None[:2]] > (3, ): + exec_("def raise_from(value, from_value):\n try:\n raise value from from_value\n finally:\n value = None\n") + else: + + def raise_from(value, from_value): + raise value + + + print_ = getattr(moves.builtins, "print", None) + if print_ is None: + + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + elif isinstance(fp, file): + if isinstance(data, unicode) and fp.encoding is not None: + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + + want_unicode = False + sep = 
kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + else: + if not isinstance(sep, str): + raise TypeError("sep must be None or a string") + else: + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + else: + if not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + + write(end) + + + if sys.version_info[None[:2]] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush: + if fp is not None: + fp.flush() + + + _add_doc(reraise, "Reraise an exception.") + if sys.version_info[0[:2]] < (3, 4): + + def _update_wrapper(wrapper, wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, updated=functools.WRAPPER_UPDATES): + for attr in assigned: + try: + value = getattr(wrapped, attr) + except AttributeError: + continue + else: + setattr(wrapper, attr, value) + + for attr in updated: + getattr(wrapper, attr).update(getattr(wrapped, attr, {})) + + wrapper.__wrapped__ = wrapped + return wrapper + + + _update_wrapper.__doc__ = functools.update_wrapper.__doc__ + + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, updated=functools.WRAPPER_UPDATES): + return functools.partial(_update_wrapper, wrapped=wrapped, assigned=assigned, + updated=updated) + + + wraps.__doc__ = functools.wraps.__doc__ + else: + wraps = functools.wraps + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + + class 
metaclass(type): + + def __new__(cls, name, this_bases, d): + if sys.version_info[None[:2]] >= (3, 7): + resolved_bases = types.resolve_bases(bases) + if resolved_bases is not bases: + d["__orig_bases__"] = bases + else: + resolved_bases = bases + return meta(name, resolved_bases, d) + + @classmethod + def __prepare__(cls, name, this_bases): + return meta.__prepare__(name, bases) + + return type.__new__(metaclass, "temporary_class", (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get("__slots__") + if slots is not None: + if isinstance(slots, str): + slots = [ + slots] + for slots_var in slots: + orig_vars.pop(slots_var) + + orig_vars.pop("__dict__", None) + orig_vars.pop("__weakref__", None) + if hasattr(cls, "__qualname__"): + orig_vars["__qualname__"] = cls.__qualname__ + return metaclass(cls.__name__, cls.__bases__, orig_vars) + + return wrapper + + +def ensure_binary(s, encoding='utf-8', errors='strict'): + """Coerce **s** to six.binary_type. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> encoded to `bytes` + - `bytes` -> `bytes` + """ + if isinstance(s, binary_type): + return s + if isinstance(s, text_type): + return s.encode(encoding, errors) + raise TypeError("not expecting type '%s'" % type(s)) + + +def ensure_str(s, encoding='utf-8', errors='strict'): + """Coerce *s* to `str`. 
+ + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if type(s) is str: + return s + if PY2: + if isinstance(s, text_type): + return s.encode(encoding, errors) + elif PY3: + if isinstance(s, binary_type): + return s.decode(encoding, errors) + assert isinstance(s, (text_type, binary_type)), "not expecting type '%s'" % type(s) + return s + + +def ensure_text(s, encoding='utf-8', errors='strict'): + """Coerce *s* to six.text_type. + + For Python 2: + - `unicode` -> `unicode` + - `str` -> `unicode` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if isinstance(s, binary_type): + return s.decode(encoding, errors) + if isinstance(s, text_type): + return s + raise TypeError("not expecting type '%s'" % type(s)) + + +def python_2_unicode_compatible(klass): + """ + A class decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if "__str__" not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied to %s because it doesn't define __str__()." 
% klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode("utf-8") + return klass + + +__path__ = [] +__package__ = __name__ +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] +if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + if type(importer).__name__ == "_SixMetaPathImporter" and importer.name == __name__: + del sys.meta_path[i] + break + + del i + del importer +sys.meta_path.append(_importer) diff --git a/APPS_UNCOMPILED/lib/snap7/__init__.py b/APPS_UNCOMPILED/lib/snap7/__init__.py new file mode 100644 index 0000000..c8f2964 --- /dev/null +++ b/APPS_UNCOMPILED/lib/snap7/__init__.py @@ -0,0 +1,22 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/snap7/__init__.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 396 bytes +""" +The Snap7 Python library. 
+""" +import pkg_resources +import snap7.server as server +import snap7.client as client +import snap7.error as error +import snap7.snap7types as types +import snap7.common as common +import snap7.util as util +import snap7.logo as logo +try: + __version__ = pkg_resources.require("snap7")[0].version +except pkg_resources.DistributionNotFound: + __version__ = "0.0rc0" diff --git a/APPS_UNCOMPILED/lib/snap7/bin/__init__.py b/APPS_UNCOMPILED/lib/snap7/bin/__init__.py new file mode 100644 index 0000000..fe76c8e --- /dev/null +++ b/APPS_UNCOMPILED/lib/snap7/bin/__init__.py @@ -0,0 +1,8 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/snap7/bin/__init__.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 20 bytes +__author__ = "gijs" diff --git a/APPS_UNCOMPILED/lib/snap7/bin/snap7-server.py b/APPS_UNCOMPILED/lib/snap7/bin/snap7-server.py new file mode 100644 index 0000000..3db738c --- /dev/null +++ b/APPS_UNCOMPILED/lib/snap7/bin/snap7-server.py @@ -0,0 +1,44 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/snap7/bin/snap7-server.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 1338 bytes +""" +This is an example snap7 server. It doesn't do much, but accepts +connection. Useful for running the python-snap7 test suite. 
+""" +import time, logging, snap7, sys +logging.basicConfig() +logger = logging.getLogger() +logger.setLevel(logging.INFO) +tcpport = 1102 + +def mainloop(): + server = snap7.server.Server() + size = 100 + DBdata = (snap7.snap7types.wordlen_to_ctypes[snap7.snap7types.S7WLByte] * size)() + PAdata = (snap7.snap7types.wordlen_to_ctypes[snap7.snap7types.S7WLByte] * size)() + TMdata = (snap7.snap7types.wordlen_to_ctypes[snap7.snap7types.S7WLByte] * size)() + CTdata = (snap7.snap7types.wordlen_to_ctypes[snap7.snap7types.S7WLByte] * size)() + server.register_area(snap7.snap7types.srvAreaDB, 1, DBdata) + server.register_area(snap7.snap7types.srvAreaPA, 1, PAdata) + server.register_area(snap7.snap7types.srvAreaTM, 1, TMdata) + server.register_area(snap7.snap7types.srvAreaCT, 1, CTdata) + server.start(tcpport=tcpport) + while True: + while True: + event = server.pick_event() + if event: + logger.info(server.event_text(event)) + else: + break + + time.sleep(1) + + +if __name__ == "__main__": + if len(sys.argv) > 1: + snap7.common.load_library(sys.argv[1]) + mainloop() diff --git a/APPS_UNCOMPILED/lib/snap7/client.py b/APPS_UNCOMPILED/lib/snap7/client.py new file mode 100644 index 0000000..3aa72a5 --- /dev/null +++ b/APPS_UNCOMPILED/lib/snap7/client.py @@ -0,0 +1,592 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/snap7/client.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 22718 bytes +""" +Snap7 client used for connection to a siemens7 server. 
+""" +import re, ctypes +from ctypes import c_int, c_char_p, byref, sizeof, c_uint16, c_int32, c_byte +from ctypes import c_void_p +from datetime import datetime +import logging, snap7 +from snap7 import six +from snap7.snap7types import S7Object, buffer_type, buffer_size, BlocksList +from snap7.snap7types import TS7BlockInfo, param_types, cpu_statuses +from snap7.common import check_error, load_library, ipv4 +from snap7.snap7exceptions import Snap7Exception +logger = logging.getLogger(__name__) + +def error_wrap(func): + """Parses a s7 error code returned the decorated function.""" + + def f(*args, **kw): + code = func(*args, **kw) + check_error(code, context="client") + + return f + + +class Client(object): + __doc__ = "\n A snap7 client\n " + pointer = None + library = None + + def __init__(self): + self.library = load_library() + self.create() + + def __del__(self): + self.destroy() + + def create(self): + """ + create a SNAP7 client. + """ + logger.info("creating snap7 client") + self.library.Cli_Create.restype = c_void_p + self.pointer = S7Object(self.library.Cli_Create()) + + def destroy(self): + """ + destroy a client. 
+ """ + logger.info("destroying snap7 client") + if self.library: + return self.library.Cli_Destroy(byref(self.pointer)) + + def plc_stop(self): + """ + stops a client + """ + logger.info("stopping plc") + return self.library.Cli_PlcStop(self.pointer) + + def plc_cold_start(self): + """ + cold starts a client + """ + logger.info("cold starting plc") + return self.library.Cli_PlcColdStart(self.pointer) + + def plc_hot_start(self): + """ + hot starts a client + """ + logger.info("hot starting plc") + return self.library.Cli_PlcHotStart(self.pointer) + + def get_cpu_state(self): + """ + Retrieves CPU state from client + """ + state = c_int(0) + self.library.Cli_GetPlcStatus(self.pointer, byref(state)) + try: + status_string = cpu_statuses[state.value] + except KeyError: + status_string = None + + if not status_string: + raise Snap7Exception("The cpu state (%s) is invalid" % state.value) + logger.debug("CPU state is %s" % status_string) + return status_string + + def get_cpu_info(self): + """ + Retrieves CPU info from client + """ + info = snap7.snap7types.S7CpuInfo() + result = self.library.Cli_GetCpuInfo(self.pointer, byref(info)) + check_error(result, context="client") + return info + + @error_wrap + def disconnect(self): + """ + disconnect a client. + """ + logger.info("disconnecting snap7 client") + return self.library.Cli_Disconnect(self.pointer) + + @error_wrap + def connect(self, address, rack, slot, tcpport=102): + """ + Connect to a S7 server. + + :param address: IP address of server + :param rack: rack on server + :param slot: slot on server. + """ + logger.info("connecting to %s:%s rack %s slot %s" % (address, tcpport, + rack, slot)) + self.set_param(snap7.snap7types.RemotePort, tcpport) + return self.library.Cli_ConnectTo(self.pointer, c_char_p(six.b(address)), c_int(rack), c_int(slot)) + + def connect_tsap(self, ip_address, local_tsap, remote_tsap, tcpport=102): + """ + Connect to a Siemens LOGO/CP243 server. 
Howto setup Logo communication configuration see: http://snap7.sourceforge.net/logo.html + + :param ip_address: IP ip_address of server + :param local_tsap: TSAP SNAP7 Client (e.g. 10.00 = 0x1000) + :param remote_tsap: TSAP Logo/CP243 Server (e.g. 20.00 = 0x2000) + """ + logger.info("connecting to %s:%s local_tsap %s remote_tsap %s" % (ip_address, tcpport, + local_tsap, remote_tsap)) + self.set_param(snap7.snap7types.RemotePort, tcpport) + self.set_connection_params(ip_address, local_tsap, remote_tsap) + result = self.library.Cli_Connect(self.pointer) + check_error(result, context="client") + return result + + def db_read(self, db_number, start, size): + """This is a lean function of Cli_ReadArea() to read PLC DB. + + :returns: user buffer. + """ + logger.debug("db_read, db_number:%s, start:%s, size:%s" % ( + db_number, start, size)) + type_ = snap7.snap7types.wordlen_to_ctypes[snap7.snap7types.S7WLByte] + data = (type_ * size)() + result = self.library.Cli_DBRead(self.pointer, db_number, start, size, byref(data)) + check_error(result, context="client") + return bytearray(data) + + @error_wrap + def db_write(self, db_number, start, data): + """ + Writes to a DB object. 
+ + :param start: write offset + :param data: bytearray + """ + wordlen = snap7.snap7types.S7WLByte + type_ = snap7.snap7types.wordlen_to_ctypes[wordlen] + size = len(data) + cdata = (type_ * size).from_buffer_copy(data) + logger.debug("db_write db_number:%s start:%s size:%s data:%s" % ( + db_number, start, size, data)) + return self.library.Cli_DBWrite(self.pointer, db_number, start, size, byref(cdata)) + + def delete(self, block_type, block_num): + """ + Deletes a block + + :param block_type: Type of block + :param block_num: Bloc number + """ + logger.info("deleting block") + blocktype = snap7.snap7types.block_types[block_type] + result = self.library.Cli_Delete(self.pointer, blocktype, block_num) + return result + + def full_upload(self, _type, block_num): + """ + Uploads a full block body from AG. + The whole block (including header and footer) is copied into the user + buffer. + + :param block_num: Number of Block + """ + _buffer = buffer_type() + size = c_int(sizeof(_buffer)) + block_type = snap7.snap7types.block_types[_type] + result = self.library.Cli_FullUpload(self.pointer, block_type, block_num, byref(_buffer), byref(size)) + check_error(result, context="client") + return (bytearray(_buffer), size.value) + + def upload(self, block_num): + """ + Uploads a block body from AG + + :param data: bytearray + """ + logger.debug("db_upload block_num: %s" % block_num) + block_type = snap7.snap7types.block_types["DB"] + _buffer = buffer_type() + size = c_int(sizeof(_buffer)) + result = self.library.Cli_Upload(self.pointer, block_type, block_num, byref(_buffer), byref(size)) + check_error(result, context="client") + logger.info("received %s bytes" % size) + return bytearray(_buffer) + + @error_wrap + def download(self, data, block_num=-1): + """ + Downloads a DB data into the AG. + A whole block (including header and footer) must be available into the + user buffer. 
+ + :param block_num: New Block number (or -1) + :param data: the user buffer + """ + type_ = c_byte + size = len(data) + cdata = (type_ * len(data)).from_buffer_copy(data) + result = self.library.Cli_Download(self.pointer, block_num, byref(cdata), size) + return result + + def db_get(self, db_number): + """Uploads a DB from AG. + """ + logger.debug("db_get db_number: %s" % db_number) + _buffer = buffer_type() + result = self.library.Cli_DBGet(self.pointer, db_number, byref(_buffer), byref(c_int(buffer_size))) + check_error(result, context="client") + return bytearray(_buffer) + + def read_area(self, area, dbnumber, start, size): + """This is the main function to read data from a PLC. + With it you can read DB, Inputs, Outputs, Merkers, Timers and Counters. + + :param dbnumber: The DB number, only used when area= S7AreaDB + :param start: offset to start writing + :param size: number of units to read + """ + if not area in snap7.snap7types.areas.values(): + raise AssertionError + elif area == snap7.snap7types.S7AreaTM: + wordlen = snap7.snap7types.S7WLTimer + else: + if area == snap7.snap7types.S7AreaCT: + wordlen = snap7.snap7types.S7WLCounter + else: + wordlen = snap7.snap7types.S7WLByte + type_ = snap7.snap7types.wordlen_to_ctypes[wordlen] + logger.debug("reading area: %s dbnumber: %s start: %s: amount %s: wordlen: %s" % ( + area, dbnumber, start, size, wordlen)) + data = (type_ * size)() + result = self.library.Cli_ReadArea(self.pointer, area, dbnumber, start, size, wordlen, byref(data)) + check_error(result, context="client") + return bytearray(data) + + def set_debug_log(self, callback): + callback_func = ctypes.CFUNCTYPE(None, ctypes.c_int, ctypes.c_char_p) + self._callback = callback_func(callback) + return self.library.set_debug_log(self.pointer, self._callback) + + @error_wrap + def write_area(self, area, dbnumber, start, data): + """This is the main function to write data into a PLC. 
It's the + complementary function of Cli_ReadArea(), the parameters and their + meanings are the same. The only difference is that the data is + transferred from the buffer pointed by pUsrData into PLC. + + :param dbnumber: The DB number, only used when area= S7AreaDB + :param start: offset to start writing + :param data: a bytearray containing the payload + """ + if area == snap7.snap7types.S7AreaTM: + wordlen = snap7.snap7types.S7WLTimer + else: + if area == snap7.snap7types.S7AreaCT: + wordlen = snap7.snap7types.S7WLCounter + else: + wordlen = snap7.snap7types.S7WLByte + type_ = snap7.snap7types.wordlen_to_ctypes[snap7.snap7types.S7WLByte] + size = len(data) + logger.debug("writing area: %s dbnumber: %s start: %s: size %s: wordlen %s type: %s" % ( + area, dbnumber, start, size, wordlen, type_)) + cdata = (type_ * len(data)).from_buffer_copy(data) + return self.library.Cli_WriteArea(self.pointer, area, dbnumber, start, size, wordlen, byref(cdata)) + + def read_multi_vars(self, items): + """This function read multiple variables from the PLC. + + :param items: list of S7DataItem objects + :returns: a tuple with the return code and a list of data items + """ + result = self.library.Cli_ReadMultiVars(self.pointer, byref(items), c_int32(len(items))) + check_error(result, context="client") + return (result, items) + + def list_blocks(self): + """Returns the AG blocks amount divided by type. + + :returns: a snap7.types.BlocksList object. 
+ """ + logger.debug("listing blocks") + blocksList = BlocksList() + result = self.library.Cli_ListBlocks(self.pointer, byref(blocksList)) + check_error(result, context="client") + logger.debug("blocks: %s" % blocksList) + return blocksList + + def list_blocks_of_type(self, blocktype, size): + """This function returns the AG list of a specified block type.""" + blocktype = snap7.snap7types.block_types.get(blocktype) + if not blocktype: + raise Snap7Exception("The blocktype parameter was invalid") + logger.debug("listing blocks of type: %s size: %s" % ( + blocktype, size)) + if size == 0: + return 0 + data = (c_uint16 * size)() + count = c_int(size) + result = self.library.Cli_ListBlocksOfType(self.pointer, blocktype, byref(data), byref(count)) + logger.debug("number of items found: %s" % count) + check_error(result, context="client") + return data + + def get_block_info(self, blocktype, db_number): + """Returns the block information for the specified block.""" + blocktype = snap7.snap7types.block_types.get(blocktype) + if not blocktype: + raise Snap7Exception("The blocktype parameter was invalid") + logger.debug("retrieving block info for block %s of type %s" % ( + db_number, blocktype)) + data = TS7BlockInfo() + result = self.library.Cli_GetAgBlockInfo(self.pointer, blocktype, db_number, byref(data)) + check_error(result, context="client") + return data + + @error_wrap + def set_session_password(self, password): + """Send the password to the PLC to meet its security level.""" + assert len(password) <= 8, "maximum password length is 8" + return self.library.Cli_SetSessionPassword(self.pointer, c_char_p(six.b(password))) + + @error_wrap + def clear_session_password(self): + """Clears the password set for the current session (logout).""" + return self.library.Cli_ClearSessionPassword(self.pointer) + + def set_connection_params(self, address, local_tsap, remote_tsap): + """ + Sets internally (IP, LocalTSAP, RemoteTSAP) Coordinates. 
+ This function must be called just before Cli_Connect(). + + :param address: PLC/Equipment IPV4 Address, for example "192.168.1.12" + :param local_tsap: Local TSAP (PC TSAP) + :param remote_tsap: Remote TSAP (PLC TSAP) + """ + assert re.match(ipv4, address), "%s is invalid ipv4" % address + result = self.library.Cli_SetConnectionParams(self.pointer, address.encode(), c_uint16(local_tsap), c_uint16(remote_tsap)) + if result != 0: + raise Snap7Exception("The parameter was invalid") + + def set_connection_type(self, connection_type): + """ + Sets the connection resource type, i.e the way in which the Clients + connects to a PLC. + + :param connection_type: 1 for PG, 2 for OP, 3 to 10 for S7 Basic + """ + result = self.library.Cli_SetConnectionType(self.pointer, c_uint16(connection_type)) + if result != 0: + raise Snap7Exception("The parameter was invalid") + + def get_connected(self): + """ + Returns the connection status + + :returns: a boolean that indicates if connected. + """ + connected = c_int32() + result = self.library.Cli_GetConnected(self.pointer, byref(connected)) + check_error(result, context="client") + return bool(connected) + + def ab_read(self, start, size): + """ + This is a lean function of Cli_ReadArea() to read PLC process outputs. 
+ """ + wordlen = snap7.snap7types.S7WLByte + type_ = snap7.snap7types.wordlen_to_ctypes[wordlen] + data = (type_ * size)() + logger.debug("ab_read: start: %s: size %s: " % (start, size)) + result = self.library.Cli_ABRead(self.pointer, start, size, byref(data)) + check_error(result, context="client") + return bytearray(data) + + def ab_write(self, start, data): + """ + This is a lean function of Cli_WriteArea() to write PLC process + outputs + """ + wordlen = snap7.snap7types.S7WLByte + type_ = snap7.snap7types.wordlen_to_ctypes[wordlen] + size = len(data) + cdata = (type_ * size).from_buffer_copy(data) + logger.debug("ab write: start: %s: size: %s: " % (start, size)) + return self.library.Cli_ABWrite(self.pointer, start, size, byref(cdata)) + + def as_ab_read(self, start, size): + """ + This is the asynchronous counterpart of client.ab_read(). + """ + wordlen = snap7.snap7types.S7WLByte + type_ = snap7.snap7types.wordlen_to_ctypes[wordlen] + data = (type_ * size)() + logger.debug("ab_read: start: %s: size %s: " % (start, size)) + result = self.library.Cli_AsABRead(self.pointer, start, size, byref(data)) + check_error(result, context="client") + return bytearray(data) + + def as_ab_write(self, start, data): + """ + This is the asynchronous counterpart of Cli_ABWrite. + """ + wordlen = snap7.snap7types.S7WLByte + type_ = snap7.snap7types.wordlen_to_ctypes[wordlen] + size = len(data) + cdata = (type_ * size).from_buffer_copy(data) + logger.debug("ab write: start: %s: size: %s: " % (start, size)) + return self.library.Cli_AsABWrite(self.pointer, start, size, byref(cdata)) + + @error_wrap + def as_compress(self, time): + """ + This is the asynchronous counterpart of client.compress(). 
+ """ + return self.library.Cli_AsCompress(self.pointer, time) + + def copy_ram_to_rom(self): + """ + + """ + return self.library.Cli_AsCopyRamToRom(self.pointer) + + def as_ct_read(self): + """ + + """ + return self.library.Cli_AsCTRead(self.pointer) + + def as_ct_write(self): + """ + + """ + return self.library.Cli_AsCTWrite(self.pointer) + + def as_db_fill(self): + """ + + """ + return self.library.Cli_AsDBFill(self.pointer) + + def as_db_get(self, db_number): + """ + This is the asynchronous counterpart of Cli_DBGet. + """ + logger.debug("db_get db_number: %s" % db_number) + _buffer = buffer_type() + result = self.library.Cli_AsDBGet(self.pointer, db_number, byref(_buffer), byref(c_int(buffer_size))) + check_error(result, context="client") + return bytearray(_buffer) + + def as_db_read(self, db_number, start, size): + """ + This is the asynchronous counterpart of Cli_DBRead. + + :returns: user buffer. + """ + logger.debug("db_read, db_number:%s, start:%s, size:%s" % ( + db_number, start, size)) + type_ = snap7.snap7types.wordlen_to_ctypes[snap7.snap7types.S7WLByte] + data = (type_ * size)() + result = self.library.Cli_AsDBRead(self.pointer, db_number, start, size, byref(data)) + check_error(result, context="client") + return bytearray(data) + + def as_db_write(self, db_number, start, data): + """ + + """ + wordlen = snap7.snap7types.S7WLByte + type_ = snap7.snap7types.wordlen_to_ctypes[wordlen] + size = len(data) + cdata = (type_ * size).from_buffer_copy(data) + logger.debug("db_write db_number:%s start:%s size:%s data:%s" % ( + db_number, start, size, data)) + return self.library.Cli_AsDBWrite(self.pointer, db_number, start, size, byref(cdata)) + + @error_wrap + def as_download(self, data, block_num=-1): + """ + Downloads a DB data into the AG asynchronously. + A whole block (including header and footer) must be available into the + user buffer. 
+ + :param block_num: New Block number (or -1) + :param data: the user buffer + """ + size = len(data) + type_ = c_byte * len(data) + cdata = type_.from_buffer_copy(data) + return self.library.Cli_AsDownload(self.pointer, block_num, byref(cdata), size) + + @error_wrap + def compress(self, time): + """ + Performs the Memory compress action. + + :param time: Maximum time expected to complete the operation (ms). + """ + return self.library.Cli_Compress(self.pointer, time) + + @error_wrap + def set_param(self, number, value): + """Sets an internal Server object parameter. + """ + logger.debug("setting param number %s to %s" % (number, value)) + type_ = param_types[number] + return self.library.Cli_SetParam(self.pointer, number, byref(type_(value))) + + def get_param(self, number): + """Reads an internal Client object parameter. + """ + logger.debug("retreiving param number %s" % number) + type_ = param_types[number] + value = type_() + code = self.library.Cli_GetParam(self.pointer, c_int(number), byref(value)) + check_error(code) + return value.value + + def get_pdu_length(self): + """ + Returns info about the PDU length. + """ + logger.info("getting PDU length") + requested_ = c_uint16() + negotiated_ = c_uint16() + code = self.library.Cli_GetPduLength(self.pointer, byref(requested_), byref(negotiated_)) + check_error(code) + return negotiated_.value + + def get_plc_datetime(self): + """ + Get date and time from PLC. 
+ + :return: date and time as datetime + """ + type_ = c_int32 + buffer = (type_ * 9)() + result = self.library.Cli_GetPlcDateTime(self.pointer, byref(buffer)) + check_error(result, context="client") + return datetime(year=(buffer[5] + 1900), + month=(buffer[4] + 1), + day=(buffer[3]), + hour=(buffer[2]), + minute=(buffer[1]), + second=(buffer[0])) + + @error_wrap + def set_plc_datetime(self, dt): + """ + Set date and time in PLC + + :param dt: date and time as datetime + """ + type_ = c_int32 + buffer = (type_ * 9)() + buffer[0] = dt.second + buffer[1] = dt.minute + buffer[2] = dt.hour + buffer[3] = dt.day + buffer[4] = dt.month - 1 + buffer[5] = dt.year - 1900 + return self.library.Cli_SetPlcDateTime(self.pointer, byref(buffer)) diff --git a/APPS_UNCOMPILED/lib/snap7/common.py b/APPS_UNCOMPILED/lib/snap7/common.py new file mode 100644 index 0000000..80e52a6 --- /dev/null +++ b/APPS_UNCOMPILED/lib/snap7/common.py @@ -0,0 +1,112 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/snap7/common.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 3232 bytes +from ctypes import c_char +from ctypes.util import find_library +import logging +from snap7.snap7exceptions import Snap7Exception +import os, platform +if platform.system() == "Windows": + from ctypes import windll as cdll +else: + from ctypes import cdll +logger = logging.getLogger(__name__) +ipv4 = "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$" + +class ADict(dict): + __doc__ = "\n Accessing dict keys like an attribute.\n " + __getattr__ = dict.__getitem__ + __setattr__ = dict.__setitem__ + + +class Snap7Library(object): + __doc__ = "\n Snap7 loader and encapsulator. 
We make this a singleton to make\n sure the library is loaded only once.\n " + _instance = None + + def __new__(cls, *args, **kwargs): + if not cls._instance: + cls._instance = object.__new__(cls) + cls._instance.lib_location = None + cls._instance.cdll = None + return cls._instance + + def __init__(self, lib_location=None): + if self.cdll: + return + else: + self.lib_location = lib_location or self.lib_location or find_library("snap7") + if not self.lib_location: + other_path = [ + find_in_environ(), "/var/pycore/lib/libsnap7.so"] + for op in other_path: + if os.path.exists(op): + self.lib_location = op + break + + msg = self.lib_location or "can't find snap7 library. If installed, try running ldconfig" + raise Snap7Exception(msg) + self.cdll = cdll.LoadLibrary(self.lib_location) + + +def load_library(lib_location=None): + """ + :returns: a ctypes cdll object with the snap7 shared library loaded. + """ + return Snap7Library(lib_location).cdll + + +def check_error(code, context='client'): + """ + check if the error code is set. If so, a Python log message is generated + and an error is raised. + """ + if code: + error = error_text(code, context) + logger.error(error) + raise Snap7Exception(error) + + +def error_text(error, context='client'): + """Returns a textual explanation of a given error number + + :param error: an error integer + :param context: server, client or partner + :returns: the error string + """ + if not context in ('client', 'server', 'partner'): + raise AssertionError + else: + logger.debug("error text for %s" % hex(error)) + len_ = 1024 + text_type = c_char * len_ + text = text_type() + library = load_library() + if context == "client": + library.Cli_ErrorText(error, text, len_) + else: + if context == "server": + library.Srv_ErrorText(error, text, len_) + else: + if context == "partner": + library.Par_ErrorText(error, text, len_) + return text.value + + +def find_in_environ(): + """Find the `libsnap7.so` file according to the os environ. 
+ + Returns: + Full path to the `libsnap7.so` file. + """ + env_path = os.environ.get("LD_LIBRARY_PATH") + if env_path: + full_path = env_path.split(":")[0] + "/libsnap7.so" + else: + full_path = "libsnap7.so" + if os.path.exists(full_path): + if os.path.isfile(full_path): + return str(full_path) diff --git a/APPS_UNCOMPILED/lib/snap7/error.py b/APPS_UNCOMPILED/lib/snap7/error.py new file mode 100644 index 0000000..6009438 --- /dev/null +++ b/APPS_UNCOMPILED/lib/snap7/error.py @@ -0,0 +1,101 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/snap7/error.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 3594 bytes +""" +Snap7 library error codes. + +we define all error codes here, but we don't use them (yet/anymore). +The error code formatting of the snap7 library as already quite good, +so we are using that now. But maybe we will use this in the future again. 
+""" +s7_client_errors = { + 1048576: '"errNegotiatingPDU"', + 2097152: '"errCliInvalidParams"', + 3145728: '"errCliJobPending"', + 4194304: '"errCliTooManyItems"', + 5242880: '"errCliInvalidWordLen"', + 6291456: '"errCliPartialDataWritten"', + 7340032: '"errCliSizeOverPDU"', + 8388608: '"errCliInvalidPlcAnswer"', + 9437184: '"errCliAddressOutOfRange"', + 10485760: '"errCliInvalidTransportSize"', + 11534336: '"errCliWriteDataSizeMismatch"', + 12582912: '"errCliItemNotAvailable"', + 13631488: '"errCliInvalidValue"', + 14680064: '"errCliCannotStartPLC"', + 15728640: '"errCliAlreadyRun"', + 16777216: '"errCliCannotStopPLC"', + 17825792: '"errCliCannotCopyRamToRom"', + 18874368: '"errCliCannotCompress"', + 19922944: '"errCliAlreadyStop"', + 20971520: '"errCliFunNotAvailable"', + 22020096: '"errCliUploadSequenceFailed"', + 23068672: '"errCliInvalidDataSizeRecvd"', + 24117248: '"errCliInvalidBlockType"', + 25165824: '"errCliInvalidBlockNumber"', + 26214400: '"errCliInvalidBlockSize"', + 27262976: '"errCliDownloadSequenceFailed"', + 28311552: '"errCliInsertRefused"', + 29360128: '"errCliDeleteRefused"', + 30408704: '"errCliNeedPassword"', + 31457280: '"errCliInvalidPassword"', + 32505856: '"errCliNoPasswordToSetOrClear"', + 33554432: '"errCliJobTimeout"', + 34603008: '"errCliPartialDataRead"', + 35651584: '"errCliBufferTooSmall"', + 36700160: '"errCliFunctionRefused"', + 37748736: '"errCliDestroying"', + 38797312: '"errCliInvalidParamNumber"', + 39845888: '"errCliCannotChangeParam"'} +isotcp_errors = { + 65536: '"errIsoConnect"', + 131072: '"errIsoDisconnect"', + 196608: '"errIsoInvalidPDU"', + 262144: '"errIsoInvalidDataSize"', + 327680: '"errIsoNullPointer"', + 393216: '"errIsoShortPacket"', + 458752: '"errIsoTooManyFragments"', + 524288: '"errIsoPduOverflow"', + 589824: '"errIsoSendPacket"', + 655360: '"errIsoRecvPacket"', + 720896: '"errIsoInvalidParams"', + 786432: '"errIsoResvd_1"', + 851968: '"errIsoResvd_2"', + 917504: '"errIsoResvd_3"', + 983040: 
'"errIsoResvd_4"'} +tcp_errors = { + 1: '"evcServerStarted"', + 2: '"evcServerStopped"', + 4: '"evcListenerCannotStart"', + 8: '"evcClientAdded"', + 16: '"evcClientRejected"', + 32: '"evcClientNoRoom"', + 64: '"evcClientException"', + 128: '"evcClientDisconnected"', + 256: '"evcClientTerminated"', + 512: '"evcClientsDropped"', + 1024: '"evcReserved_00000400"', + 2048: '"evcReserved_00000800"', + 4096: '"evcReserved_00001000"', + 8192: '"evcReserved_00002000"', + 16384: '"evcReserved_00004000"', + 32768: '"evcReserved_00008000"'} +s7_server_errors = { + 1048576: '"errSrvCannotStart"', + 2097152: '"errSrvDBNullPointer"', + 3145728: '"errSrvAreaAlreadyExists"', + 4194304: '"errSrvUnknownArea"', + 5242880: '"verrSrvInvalidParams"', + 6291456: '"errSrvTooManyDB"', + 7340032: '"errSrvInvalidParamNumber"', + 8388608: '"errSrvCannotChangeParam"'} +client_errors = s7_client_errors.copy() +client_errors.update(isotcp_errors) +client_errors.update(tcp_errors) +server_errors = s7_server_errors.copy() +server_errors.update(isotcp_errors) +server_errors.update(tcp_errors) diff --git a/APPS_UNCOMPILED/lib/snap7/logo.py b/APPS_UNCOMPILED/lib/snap7/logo.py new file mode 100644 index 0000000..cfd8bc7 --- /dev/null +++ b/APPS_UNCOMPILED/lib/snap7/logo.py @@ -0,0 +1,277 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/snap7/logo.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 11891 bytes +""" +Snap7 client used for connection to a siemens LOGO 7/8 server. 
+""" +import re +from ctypes import c_int, byref, c_uint16, c_int32 +from ctypes import c_void_p +import logging, struct, snap7 +from snap7 import snap7types +from snap7.snap7types import S7Object +from snap7.snap7types import param_types +from snap7.common import check_error, load_library, ipv4 +from snap7.snap7exceptions import Snap7Exception +logger = logging.getLogger(__name__) + +class Logo(object): + __doc__ = "\n A snap7 Siemens Logo client: There are two main comfort functions available :func:`Logo.read` and :func:`Logo.write`. \n This functions realize a high level access to the VM addresses of the Siemens Logo just use the form:\n \n * V10.3 for bit values\n * V10 for the complete byte\n * VW12 for a word (used for analog values)\n \n For more information see examples for Siemens Logo 7 and 8 \n " + + def __init__(self): + self.pointer = False + self.library = load_library() + self.create() + + def __del__(self): + self.destroy() + + def create(self): + """ + create a SNAP7 client. + """ + logger.info("creating snap7 client") + self.library.Cli_Create.restype = c_void_p + self.pointer = S7Object(self.library.Cli_Create()) + + def destroy(self): + """ + destroy a client. + """ + logger.info("destroying snap7 client") + return self.library.Cli_Destroy(byref(self.pointer)) + + def disconnect(self): + """ + disconnect a client. + """ + logger.info("disconnecting snap7 client") + result = self.library.Cli_Disconnect(self.pointer) + check_error(result, context="client") + return result + + def connect(self, ip_address, tsap_snap7, tsap_logo, tcpport=102): + """ + Connect to a Siemens LOGO server. Howto setup Logo communication configuration see: http://snap7.sourceforge.net/logo.html + + :param ip_address: IP ip_address of server + :param tsap_snap7: TSAP SNAP7 Client (e.g. 10.00 = 0x1000) + :param tsap_logo: TSAP Logo Server (e.g. 
20.00 = 0x2000) + """ + logger.info("connecting to %s:%s tsap_snap7 %s tsap_logo %s" % (ip_address, tcpport, + tsap_snap7, tsap_logo)) + self.set_param(snap7.snap7types.RemotePort, tcpport) + self.set_connection_params(ip_address, tsap_snap7, tsap_logo) + result = self.library.Cli_Connect(self.pointer) + check_error(result, context="client") + return result + + def read(self, vm_address): + """ + Reads from VM addresses of Siemens Logo. Examples: read("V40") / read("VW64") / read("V10.2") + + :param vm_address: of Logo memory (e.g. V30.1, VW32, V24) + :returns: integer + """ + area = snap7types.S7AreaDB + db_number = 1 + size = 1 + start = 0 + wordlen = 0 + logger.debug("read, vm_address:%s" % vm_address) + if re.match("V[0-9]{1,4}\\.[0-7]{1}", vm_address): + logger.info("read, Bit address: " + vm_address) + address = vm_address[1[:None]].split(".") + address_byte = int(address[0]) + address_bit = int(address[1]) + start = address_byte * 8 + address_bit + wordlen = snap7types.S7WLBit + else: + if re.match("V[0-9]+", vm_address): + logger.info("Byte address: " + vm_address) + start = int(vm_address[1[:None]]) + wordlen = snap7types.S7WLByte + else: + if re.match("VW[0-9]+", vm_address): + logger.info("Word address: " + vm_address) + start = int(vm_address[2[:None]]) + wordlen = snap7types.S7WLWord + else: + if re.match("VD[0-9]+", vm_address): + logger.info("DWord address: " + vm_address) + start = int(vm_address[2[:None]]) + wordlen = snap7types.S7WLDWord + else: + logger.info("Unknown address format") + return 0 + type_ = snap7.snap7types.wordlen_to_ctypes[wordlen] + data = (type_ * size)() + logger.debug("start:%s, wordlen:%s, data-length:%s" % (start, wordlen, len(data))) + result = self.library.Cli_ReadArea(self.pointer, area, db_number, start, size, wordlen, byref(data)) + check_error(result, context="client") + if wordlen == snap7types.S7WLBit: + return data[0] + if wordlen == snap7types.S7WLByte: + return struct.unpack_from(">B", data)[0] + if wordlen == 
snap7types.S7WLWord: + return struct.unpack_from(">h", data)[0] + if wordlen == snap7types.S7WLDWord: + return struct.unpack_from(">l", data)[0] + + def write(self, vm_address, value): + """ + Writes to VM addresses of Siemens Logo. + Example: write("VW10", 200) or write("V10.3", 1) + + :param vm_address: write offset + :param value: integer + """ + area = snap7types.S7AreaDB + db_number = 1 + start = 0 + amount = 1 + wordlen = 0 + data = bytearray(0) + logger.debug("write, vm_address:%s, value:%s" % ( + vm_address, value)) + if re.match("^V[0-9]{1,4}\\.[0-7]{1}$", vm_address): + logger.info("read, Bit address: " + vm_address) + address = vm_address[1[:None]].split(".") + address_byte = int(address[0]) + address_bit = int(address[1]) + start = address_byte * 8 + address_bit + wordlen = snap7types.S7WLBit + if value > 0: + data = bytearray([1]) + else: + data = bytearray([0]) + else: + if re.match("^V[0-9]+$", vm_address): + logger.info("Byte address: " + vm_address) + start = int(vm_address[1[:None]]) + wordlen = snap7types.S7WLByte + data = bytearray(struct.pack(">B", value)) + else: + if re.match("^VW[0-9]+$", vm_address): + logger.info("Word address: " + vm_address) + start = int(vm_address[2[:None]]) + wordlen = snap7types.S7WLWord + data = bytearray(struct.pack(">h", value)) + else: + if re.match("^VD[0-9]+$", vm_address): + logger.info("DWord address: " + vm_address) + start = int(vm_address[2[:None]]) + wordlen = snap7types.S7WLDWord + data = bytearray(struct.pack(">l", value)) + else: + logger.info("write, Unknown address format: " + vm_address) + return 1 + if wordlen == snap7types.S7WLBit: + type_ = snap7.snap7types.wordlen_to_ctypes[snap7types.S7WLByte] + else: + type_ = snap7.snap7types.wordlen_to_ctypes[wordlen] + cdata = (type_ * amount).from_buffer_copy(data) + logger.debug("write, vm_address:%s value:%s" % (vm_address, value)) + result = self.library.Cli_WriteArea(self.pointer, area, db_number, start, amount, wordlen, byref(cdata)) + 
check_error(result, context="client") + return result + + def db_read(self, db_number, start, size): + """ + This is a lean function of Cli_ReadArea() to read PLC DB. + + :param db_number: for Logo only DB=1 + :param start: start address for Logo7 0..951 / Logo8 0..1469 + :param size: in bytes + :returns: array of bytes + """ + logger.debug("db_read, db_number:%s, start:%s, size:%s" % ( + db_number, start, size)) + type_ = snap7.snap7types.wordlen_to_ctypes[snap7.snap7types.S7WLByte] + data = (type_ * size)() + result = self.library.Cli_DBRead(self.pointer, db_number, start, size, byref(data)) + check_error(result, context="client") + return bytearray(data) + + def db_write(self, db_number, start, data): + """ + Writes to a DB object. + + :param db_number: for Logo only DB=1 + :param start: start address for Logo7 0..951 / Logo8 0..1469 + :param data: bytearray + """ + wordlen = snap7.snap7types.S7WLByte + type_ = snap7.snap7types.wordlen_to_ctypes[wordlen] + size = len(data) + cdata = (type_ * size).from_buffer_copy(data) + logger.debug("db_write db_number:%s start:%s size:%s data:%s" % ( + db_number, start, size, data)) + result = self.library.Cli_DBWrite(self.pointer, db_number, start, size, byref(cdata)) + check_error(result, context="client") + return result + + def set_connection_params(self, ip_address, tsap_snap7, tsap_logo): + """ + Sets internally (IP, LocalTSAP, RemoteTSAP) Coordinates. + This function must be called just before Cli_Connect(). + + :param ip_address: IP ip_address of server + :param tsap_snap7: TSAP SNAP7 Client (e.g. 10.00 = 0x1000) + :param tsap_logo: TSAP Logo Server (e.g. 
20.00 = 0x2000) + """ + assert re.match(ipv4, ip_address), "%s is invalid ipv4" % ip_address + result = self.library.Cli_SetConnectionParams(self.pointer, ip_address.encode(), c_uint16(tsap_snap7), c_uint16(tsap_logo)) + if result != 0: + raise Snap7Exception("The parameter was invalid") + + def set_connection_type(self, connection_type): + """ + Sets the connection resource type, i.e the way in which the Clients + connects to a PLC. + + :param connection_type: 1 for PG, 2 for OP, 3 to 10 for S7 Basic + """ + result = self.library.Cli_SetConnectionType(self.pointer, c_uint16(connection_type)) + if result != 0: + raise Snap7Exception("The parameter was invalid") + + def get_connected(self): + """ + Returns the connection status + + :returns: a boolean that indicates if connected. + """ + connected = c_int32() + result = self.library.Cli_GetConnected(self.pointer, byref(connected)) + check_error(result, context="client") + return bool(connected) + + def set_param(self, number, value): + """Sets an internal Server object parameter. + + :param number: Parameter type number + :param value: Parameter value + """ + logger.debug("setting param number %s to %s" % (number, value)) + type_ = param_types[number] + result = self.library.Cli_SetParam(self.pointer, number, byref(type_(value))) + check_error(result, context="client") + return result + + def get_param(self, number): + """Reads an internal Logo object parameter. 
+ + :param number: Parameter type number + :returns: Parameter value + """ + logger.debug("retreiving param number %s" % number) + type_ = param_types[number] + value = type_() + code = self.library.Cli_GetParam(self.pointer, c_int(number), byref(value)) + check_error(code) + return value.value diff --git a/APPS_UNCOMPILED/lib/snap7/partner.py b/APPS_UNCOMPILED/lib/snap7/partner.py new file mode 100644 index 0000000..3ae3818 --- /dev/null +++ b/APPS_UNCOMPILED/lib/snap7/partner.py @@ -0,0 +1,218 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/snap7/partner.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 7780 bytes +""" +Snap7 code for partnering with a siemens 7 server. + +This allows you to create a S7 peer to peer communication. Unlike the +client-server model, where the client makes a request and the server replies to +it, the peer to peer model sees two components with same rights, each of them +can send data asynchronously. The only difference between them is the one who +is requesting the connection. +""" +import ctypes, logging, re +from snap7.common import load_library, check_error, ipv4 +import snap7.snap7types +from snap7.snap7exceptions import Snap7Exception +logger = logging.getLogger(__name__) + +def error_wrap(func): + """Parses a s7 error code returned the decorated function.""" + + def f(*args, **kw): + code = func(*args, **kw) + check_error(code, context="partner") + + return f + + +class Partner(object): + __doc__ = "\n A snap7 partner.\n " + pointer = None + library = None + + def __init__(self, active=False): + self.library = load_library() + self.create(active) + + def __del__(self): + self.destroy() + + def as_b_send(self): + """ + Sends a data packet to the partner. This function is asynchronous, i.e. 
+ it terminates immediately, a completion method is needed to know when + the transfer is complete. + """ + return self.library.Par_AsBSend(self.pointer) + + def b_recv(self): + """ + Receives a data packet from the partner. This function is + synchronous, it waits until a packet is received or the timeout + supplied expires. + """ + return self.library.Par_BRecv(self.pointer) + + def b_send(self): + """ + Sends a data packet to the partner. This function is synchronous, i.e. + it terminates when the transfer job (send+ack) is complete. + """ + return self.library.Par_BSend(self.pointer) + + def check_as_b_recv_completion(self): + """ + Checks if a packed received was received. + """ + return self.library.Par_CheckAsBRecvCompletion(self.pointer) + + def check_as_b_send_completion(self): + """ + Checks if the current asynchronous send job was completed and terminates + immediately. + """ + op_result = ctypes.c_int32() + result = self.library.Par_CheckAsBSendCompletion(self.pointer, ctypes.byref(op_result)) + return_values = {0:"job complete", + 1:"job in progress", + -2:"invalid handled supplied"} + if result == -2: + raise Snap7Exception("The Client parameter was invalid") + return (return_values[result], op_result) + + def create(self, active=False): + """ + Creates a Partner and returns its handle, which is the reference that + you have to use every time you refer to that Partner. + + :param active: 0 + :returns: a pointer to the partner object + """ + self.library.Par_Create.restype = snap7.snap7types.S7Object + self.pointer = snap7.snap7types.S7Object(self.library.Par_Create(int(active))) + + def destroy(self): + """ + Destroy a Partner of given handle. + Before destruction the Partner is stopped, all clients disconnected and + all shared memory blocks released. + """ + if self.library: + return self.library.Par_Destroy(ctypes.byref(self.pointer)) + + def get_last_error(self): + """ + Returns the last job result. 
+ """ + error = ctypes.c_int32() + result = self.library.Par_GetLastError(self.pointer, ctypes.byref(error)) + check_error(result, "partner") + return error + + def get_param(self, number): + """ + Reads an internal Partner object parameter. + """ + logger.debug("retreiving param number %s" % number) + type_ = snap7.snap7types.param_types[number] + value = type_() + code = self.library.Par_GetParam(self.pointer, ctypes.c_int(number), ctypes.byref(value)) + check_error(code) + return value.value + + def get_stats(self): + """ + Returns some statistics. + + :returns: a tuple containing bytes send, received, send errors, recv errors + """ + sent = ctypes.c_uint32() + recv = ctypes.c_uint32() + send_errors = ctypes.c_uint32() + recv_errors = ctypes.c_uint32() + result = self.library.Par_GetStats(self.pointer, ctypes.byref(sent), ctypes.byref(recv), ctypes.byref(send_errors), ctypes.byref(recv_errors)) + check_error(result, "partner") + return (sent, recv, send_errors, recv_errors) + + def get_status(self): + """ + Returns the Partner status. + """ + status = ctypes.c_int32() + result = self.library.Par_GetStatus(self.pointer, ctypes.byref(status)) + check_error(result, "partner") + return status + + def get_times(self): + """ + Returns the last send and recv jobs execution time in milliseconds. + """ + send_time = ctypes.c_int32() + recv_time = ctypes.c_int32() + result = self.library.Par_GetTimes(self.pointer, ctypes.byref(send_time), ctypes.byref(recv_time)) + check_error(result, "partner") + return (send_time, recv_time) + + @error_wrap + def set_param(self, number, value): + """Sets an internal Partner object parameter. + """ + logger.debug("setting param number %s to %s" % (number, value)) + return self.library.Par_SetParam(self.pointer, number, ctypes.byref(ctypes.c_int(value))) + + def set_recv_callback(self): + """ + Sets the user callback that the Partner object has to call when a data + packet is incoming. 
+ """ + return self.library.Par_SetRecvCallback(self.pointer) + + def set_send_callback(self): + """ + Sets the user callback that the Partner object has to call when the + asynchronous data sent is complete. + """ + return self.library.Par_SetSendCallback(self.pointer) + + @error_wrap + def start(self): + """ + Starts the Partner and binds it to the specified IP address and the + IsoTCP port. + """ + return self.library.Par_Start(self.pointer) + + @error_wrap + def start_to(self, local_ip, remote_ip, local_tsap, remote_tsap): + """ + Starts the Partner and binds it to the specified IP address and the + IsoTCP port. + + :param local_ip: PC host IPV4 Address. "0.0.0.0" is the default adapter + :param remote_ip: PLC IPV4 Address + :param local_tsap: Local TSAP + :param remote_tsap: PLC TSAP + """ + assert re.match(ipv4, local_ip), "%s is invalid ipv4" % local_ip + assert re.match(ipv4, remote_ip), "%s is invalid ipv4" % remote_ip + logger.info("starting partnering from %s to %s" % (local_ip, remote_ip)) + return self.library.Par_StartTo(self.pointer, local_ip, remote_ip, ctypes.c_uint16(local_tsap), ctypes.c_uint16(remote_tsap)) + + def stop(self): + """ + Stops the Partner, disconnects gracefully the remote partner. + """ + return self.library.Par_Stop(self.pointer) + + @error_wrap + def wait_as_b_send_completion(self, timeout=0): + """ + Waits until the current asynchronous send job is done or the timeout + expires. 
+ """ + return self.library.Par_WaitAsBSendCompletion(self.pointer, timeout) diff --git a/APPS_UNCOMPILED/lib/snap7/server.py b/APPS_UNCOMPILED/lib/snap7/server.py new file mode 100644 index 0000000..c09d5c7 --- /dev/null +++ b/APPS_UNCOMPILED/lib/snap7/server.py @@ -0,0 +1,279 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/snap7/server.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 10141 bytes +""" +Snap7 server used for mimicking a siemens 7 server. +""" +import ctypes, logging, re, snap7.snap7types +from snap7.common import check_error, load_library, ipv4 +from snap7 import six +logger = logging.getLogger(__name__) + +def error_wrap(func): + """Parses a s7 error code returned the decorated function.""" + + def f(*args, **kw): + code = func(*args, **kw) + check_error(code, context="server") + + return f + + +class Server(object): + __doc__ = "\n A fake S7 server.\n " + pointer = None + callback = None + library = None + + def __init__(self, log=True): + """ + Create a fake S7 server. set log to false if you want to disable + event logging to python logging. + """ + self.library = load_library() + self.create() + if log: + self._set_log_callback() + + def __del__(self): + self.destroy() + + def event_text(self, event): + """Returns a textual explanation of a given event object + + :param event: an PSrvEvent struct object + :returns: the error string + """ + logger.debug("error text for %s" % hex(event.EvtCode)) + len_ = 1024 + text_type = ctypes.c_char * len_ + text = text_type() + error = self.library.Srv_EventText(ctypes.byref(event), ctypes.byref(text), len_) + check_error(error) + if six.PY2: + return text.value + return text.value.decode("ascii") + + def create(self): + """ + create the server. 
+ """ + logger.info("creating server") + self.library.Srv_Create.restype = snap7.snap7types.S7Object + self.pointer = snap7.snap7types.S7Object(self.library.Srv_Create()) + + @error_wrap + def register_area(self, area_code, index, userdata): + """Shares a memory area with the server. That memory block will be + visible by the clients. + """ + size = ctypes.sizeof(userdata) + logger.info("registering area %s, index %s, size %s" % (area_code, + index, size)) + size = ctypes.sizeof(userdata) + return self.library.Srv_RegisterArea(self.pointer, area_code, index, ctypes.byref(userdata), size) + + @error_wrap + def set_events_callback(self, call_back): + """Sets the user callback that the Server object has to call when an + event is created. + """ + logger.info("setting event callback") + callback_wrap = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.POINTER(snap7.snap7types.SrvEvent), ctypes.c_int) + + def wrapper(usrptr, pevent, size): + """ + Wraps python function into a ctypes function + + :param usrptr: not used + :param pevent: pointer to snap7 event struct + :param size: + :returns: should return an int + """ + logger.info("callback event: " + self.event_text(pevent.contents)) + call_back(pevent.contents) + return 0 + + self._callback = callback_wrap(wrapper) + usrPtr = ctypes.c_void_p() + return self.library.Srv_SetEventsCallback(self.pointer, self._callback, usrPtr) + + @error_wrap + def set_read_events_callback(self, call_back): + """ + Sets the user callback that the Server object has to call when a Read + event is created. + + :param call_back: a callback function that accepts a pevent argument. 
+ """ + logger.info("setting read event callback") + callback_wrapper = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.POINTER(snap7.snap7types.SrvEvent), ctypes.c_int) + + def wrapper(usrptr, pevent, size): + """ + Wraps python function into a ctypes function + + :param usrptr: not used + :param pevent: pointer to snap7 event struct + :param size: + :returns: should return an int + """ + logger.info("callback event: " + self.event_text(pevent.contents)) + call_back(pevent.contents) + return 0 + + self._read_callback = callback_wrapper(wrapper) + return self.library.Srv_SetReadEventsCallback(self.pointer, self._read_callback) + + def _set_log_callback(self): + """Sets a callback that logs the events + """ + logger.debug("setting up event logger") + + def log_callback(event): + logger.info("callback event: " + self.event_text(event)) + + self.set_events_callback(log_callback) + + @error_wrap + def start(self, tcpport=102): + """ + start the server. + """ + if tcpport != 102: + logger.info("setting server TCP port to %s" % tcpport) + self.set_param(snap7.snap7types.LocalPort, tcpport) + logger.info("starting server on 0.0.0.0:%s" % tcpport) + return self.library.Srv_Start(self.pointer) + + @error_wrap + def stop(self): + """ + stop the server. + """ + logger.info("stopping server") + return self.library.Srv_Stop(self.pointer) + + def destroy(self): + """ + destroy the server. + """ + logger.info("destroying server") + if self.library: + self.library.Srv_Destroy(ctypes.byref(self.pointer)) + + def get_status(self): + """Reads the server status, the Virtual CPU status and the number of + the clients connected. 
+ + :returns: server status, cpu status, client count + """ + logger.debug("get server status") + server_status = ctypes.c_int() + cpu_status = ctypes.c_int() + clients_count = ctypes.c_int() + error = self.library.Srv_GetStatus(self.pointer, ctypes.byref(server_status), ctypes.byref(cpu_status), ctypes.byref(clients_count)) + check_error(error) + logger.debug("status server %s cpu %s clients %s" % ( + server_status.value, cpu_status.value, + clients_count.value)) + return (snap7.snap7types.server_statuses[server_status.value], + snap7.snap7types.cpu_statuses[cpu_status.value], + clients_count.value) + + @error_wrap + def unregister_area(self, area_code, index): + """'Unshares' a memory area previously shared with Srv_RegisterArea(). + That memory block will be no longer visible by the clients. + """ + return self.library.Srv_UnregisterArea(self.pointer, area_code, index) + + @error_wrap + def unlock_area(self, code, index): + """Unlocks a previously locked shared memory area. + """ + logger.debug("unlocking area code %s index %s" % (code, index)) + return self.library.Srv_UnlockArea(self.pointer, code, index) + + @error_wrap + def lock_area(self, code, index): + """Locks a shared memory area. + """ + logger.debug("locking area code %s index %s" % (code, index)) + return self.library.Srv_LockArea(self.pointer, code, index) + + @error_wrap + def start_to(self, ip, tcpport=102): + """ + start server on a specific interface. + """ + if tcpport != 102: + logger.info("setting server TCP port to %s" % tcpport) + self.set_param(snap7.snap7types.LocalPort, tcpport) + assert re.match(ipv4, ip), "%s is invalid ipv4" % ip + logger.info("starting server to %s:102" % ip) + return self.library.Srv_Start(self.pointer, ip) + + @error_wrap + def set_param(self, number, value): + """Sets an internal Server object parameter. 
+ """ + logger.debug("setting param number %s to %s" % (number, value)) + return self.library.Srv_SetParam(self.pointer, number, ctypes.byref(ctypes.c_int(value))) + + @error_wrap + def set_mask(self, kind, mask): + """Writes the specified filter mask. + """ + logger.debug("setting mask kind %s to %s" % (kind, mask)) + return self.library.Srv_SetMask(self.pointer, kind, mask) + + @error_wrap + def set_cpu_status(self, status): + """Sets the Virtual CPU status. + """ + assert status in snap7.snap7types.cpu_statuses, "unknown cpu state %s" % status + logger.debug("setting cpu status to %s" % status) + return self.library.Srv_SetCpuStatus(self.pointer, status) + + def pick_event(self): + """Extracts an event (if available) from the Events queue. + """ + logger.debug("checking event queue") + event = snap7.snap7types.SrvEvent() + ready = ctypes.c_int32() + code = self.library.Srv_PickEvent(self.pointer, ctypes.byref(event), ctypes.byref(ready)) + check_error(code) + if ready: + logger.debug("one event ready: %s" % event) + return event + logger.debug("no events ready") + + def get_param(self, number): + """Reads an internal Server object parameter. + """ + logger.debug("retreiving param number %s" % number) + value = ctypes.c_int() + code = self.library.Srv_GetParam(self.pointer, number, ctypes.byref(value)) + check_error(code) + return value.value + + def get_mask(self, kind): + """Reads the specified filter mask. + """ + logger.debug("retrieving mask kind %s" % kind) + mask = snap7.snap7types.longword() + code = self.library.Srv_GetMask(self.pointer, kind, ctypes.byref(mask)) + check_error(code) + return mask + + @error_wrap + def clear_events(self): + """Empties the Event queue. 
+ """ + logger.debug("clearing event queue") + return self.library.Srv_ClearEvents(self.pointer) diff --git a/APPS_UNCOMPILED/lib/snap7/six.py b/APPS_UNCOMPILED/lib/snap7/six.py new file mode 100644 index 0000000..6c30216 --- /dev/null +++ b/APPS_UNCOMPILED/lib/snap7/six.py @@ -0,0 +1,721 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/snap7/six.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 26731 bytes +"""Utilities for writing code that runs on Python 2 and 3""" +import functools, operator, sys, types +__author__ = "Benjamin Peterson " +__version__ = "1.7.3" +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +if PY3: + string_types = ( + str,) + integer_types = (int,) + class_types = (type,) + text_type = str + binary_type = bytes + MAXSIZE = sys.maxsize +else: + string_types = ( + basestring,) + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + if sys.platform.startswith("java"): + MAXSIZE = int(2147483647) + else: + + class X(object): + + def __len__(self): + return 2147483648L + + + try: + len(X()) + except OverflowError: + MAXSIZE = int(2147483647) + else: + MAXSIZE = int(9223372036854775807L) + del X + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) + delattr(obj.__class__, self.name) + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if 
class MovedModule(_LazyDescr):
    """Lazy proxy for a module that was renamed between Python 2 and 3."""

    def __init__(self, name, old, new=None):
        super(MovedModule, self).__init__(name)
        if PY3:
            if new is None:
                new = name
            self.mod = new
        else:
            self.mod = old

    def _resolve(self):
        return _import_module(self.mod)

    def __getattr__(self, attr):
        _module = self._resolve()
        value = getattr(_module, attr)
        # Cache the attribute so subsequent lookups bypass __getattr__.
        setattr(self, attr, value)
        return value


class _LazyModule(types.ModuleType):
    """Module whose attributes are _LazyDescr instances, resolved on access."""

    def __init__(self, name):
        super(_LazyModule, self).__init__(name)
        self.__doc__ = self.__class__.__doc__

    def __dir__(self):
        attrs = ["__doc__", "__name__"]
        attrs += [attr.name for attr in self._moved_attributes]
        return attrs

    # Subclasses fill this in with their MovedAttribute/MovedModule entries.
    _moved_attributes = []


class MovedAttribute(_LazyDescr):
    """Lazy proxy for an attribute that moved to another module in Python 3."""

    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
        super(MovedAttribute, self).__init__(name)
        if PY3:
            if new_mod is None:
                new_mod = name
            # BUG FIX: the decompiled code assigned self.mod only inside an
            # "else:" branch, leaving it unset whenever new_mod defaulted to
            # the attribute name; upstream six assigns it unconditionally.
            self.mod = new_mod
            if new_attr is None:
                if old_attr is None:
                    new_attr = name
                else:
                    new_attr = old_attr
            self.attr = new_attr
        else:
            self.mod = old_mod
            if old_attr is None:
                old_attr = name
            self.attr = old_attr

    def _resolve(self):
        module = _import_module(self.mod)
        return getattr(module, self.attr)


class _SixMetaPathImporter(object):
    """
    A meta path importer to import six.moves and its submodules.

    This class implements a PEP302 finder and loader. It should be compatible
    with Python 2.5 and all existing versions of Python3
    """

    def __init__(self, six_module_name):
        self.name = six_module_name
        self.known_modules = {}

    def _add_module(self, mod, *fullnames):
        for fullname in fullnames:
            self.known_modules[self.name + "." + fullname] = mod

    def _get_module(self, fullname):
        return self.known_modules[self.name + "." + fullname]

    def find_module(self, fullname, path=None):
        if fullname in self.known_modules:
            return self

    def __get_module(self, fullname):
        try:
            return self.known_modules[fullname]
        except KeyError:
            raise ImportError("This loader does not know module " + fullname)

    def load_module(self, fullname):
        try:
            # Already imported: return the cached module.
            return sys.modules[fullname]
        except KeyError:
            pass

        mod = self.__get_module(fullname)
        if isinstance(mod, MovedModule):
            mod = mod._resolve()
        else:
            mod.__loader__ = self
        sys.modules[fullname] = mod
        return mod

    def is_package(self, fullname):
        """
        Return true, if the named module is a package.

        We need this method to get correct spec objects with
        Python 3.4 (see PEP451)
        """
        return hasattr(self.__get_module(fullname), "__path__")

    def get_code(self, fullname):
        """Return None

        Required, if is_package is implemented"""
        self.__get_module(fullname)

    get_source = get_code


_importer = _SixMetaPathImporter(__name__)


class _MovedItems(_LazyModule):
    """Lazy loading of moved objects"""
    __path__ = []
"__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + 
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), + MovedModule("winreg", "_winreg")] +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." 
+ attr.name) + +del attr +_MovedItems._moved_attributes = _moved_attributes +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + +class Module_six_moves_urllib_parse(_LazyModule): + __doc__ = "Lazy loading of moved objects in six.moves.urllib_parse" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse")] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) + +del attr +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), "moves.urllib_parse", "moves.urllib.parse") + +class Module_six_moves_urllib_error(_LazyModule): + __doc__ = "Lazy loading of moved objects in six.moves.urllib_error" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + 
MovedAttribute("ContentTooShortError", "urllib", "urllib.error")] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) + +del attr +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), "moves.urllib_error", "moves.urllib.error") + +class Module_six_moves_urllib_request(_LazyModule): + __doc__ = "Lazy loading of moved objects in six.moves.urllib_request" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + 
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request")] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) + +del attr +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), "moves.urllib_request", "moves.urllib.request") + +class Module_six_moves_urllib_response(_LazyModule): + __doc__ = "Lazy loading of moved objects in six.moves.urllib_response" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response")] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) + +del attr +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), "moves.urllib_response", "moves.urllib.response") + +class Module_six_moves_urllib_robotparser(_LazyModule): + __doc__ = "Lazy loading of moved objects in six.moves.urllib_robotparser" + 
+ +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser")] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) + +del attr +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), "moves.urllib_robotparser", "moves.urllib.robotparser") + +class Module_six_moves_urllib(types.ModuleType): + __doc__ = "Create a six.moves.urllib namespace that resembles the Python 3 namespace" + __path__ = [] + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return [ + 'parse', 'error', 'request', 'response', 'robotparser'] + + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), "moves.urllib") + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" +try: + advance_iterator = next +except NameError: + + def advance_iterator(it): + return it.next() + + +next = advance_iterator +try: + callable = callable +except 
try:
    advance_iterator = next
except NameError:

    def advance_iterator(it):
        return it.next()


next = advance_iterator
try:
    callable = callable
except NameError:

    def callable(obj):
        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)


if PY3:

    def get_unbound_function(unbound):
        return unbound

    create_bound_method = types.MethodType
    Iterator = object
else:

    def get_unbound_function(unbound):
        return unbound.im_func

    def create_bound_method(func, obj):
        return types.MethodType(func, obj, obj.__class__)

    class Iterator(object):

        def next(self):
            return type(self).__next__(self)

    callable = callable
_add_doc(get_unbound_function, "Get the function out of a possibly unbound function")
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
if PY3:

    def iterkeys(d, **kw):
        return iter(d.keys(**kw))

    def itervalues(d, **kw):
        return iter(d.values(**kw))

    def iteritems(d, **kw):
        return iter(d.items(**kw))

    def iterlists(d, **kw):
        return iter(d.lists(**kw))


else:

    def iterkeys(d, **kw):
        return iter(d.iterkeys(**kw))

    def itervalues(d, **kw):
        return iter(d.itervalues(**kw))

    def iteritems(d, **kw):
        return iter(d.iteritems(**kw))

    def iterlists(d, **kw):
        return iter(d.iterlists(**kw))


_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems, "Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists, "Return an iterator over the (key, [values]) pairs of a dictionary.")
if PY3:

    def b(s):
        return s.encode("latin-1")

    def u(s):
        return s

    unichr = chr
    if sys.version_info[1] <= 1:

        def int2byte(i):
            return bytes((i,))

    else:
        # int2byte(i) == bytes((i,)) but faster on 3.2+
        int2byte = operator.methodcaller("to_bytes", 1, "big")
    byte2int = operator.itemgetter(0)
    indexbytes = operator.getitem
    iterbytes = iter
    import io
    StringIO = io.StringIO
    BytesIO = io.BytesIO
else:

    def b(s):
        return s

    def u(s):
        return unicode(s.replace("\\\\", "\\\\\\\\"), "unicode_escape")

    unichr = unichr
    int2byte = chr

    def byte2int(bs):
        return ord(bs[0])

    def indexbytes(buf, i):
        return ord(buf[i])

    def iterbytes(buf):
        return (ord(byte) for byte in buf)

    import StringIO
    StringIO = BytesIO = StringIO.StringIO
_add_doc(b, "Byte literal")
_add_doc(u, "Text literal")
if PY3:
    exec_ = getattr(moves.builtins, "exec")

    def reraise(tp, value, tb=None):
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value


else:

    def exec_(_code_, _globs_=None, _locs_=None):
        """Execute code in a namespace."""
        if _globs_ is None:
            frame = sys._getframe(1)
            _globs_ = frame.f_globals
            if _locs_ is None:
                _locs_ = frame.f_locals
            del frame
        else:
            if _locs_ is None:
                _locs_ = _globs_
        exec("exec _code_ in _globs_, _locs_")

    exec_("def reraise(tp, value, tb=None):\n raise tp, value, tb\n")
print_ = getattr(moves.builtins, "print", None)
if print_ is None:

    def print_(*args, **kwargs):
        """The new-style print function for Python 2.4 and 2.5."""
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            return

        def write(data):
            if not isinstance(data, basestring):
                data = str(data)
            # BUG FIX: the decompiler turned this second "if" into an
            # "elif", skipping the encode step after str() conversion was
            # not needed; restored upstream six control flow.
            if (isinstance(fp, file) and
                    isinstance(data, unicode) and
                    fp.encoding is not None):
                errors = getattr(fp, "errors", None)
                if errors is None:
                    errors = "strict"
                data = data.encode(fp.encoding, errors)
            fp.write(data)

        want_unicode = False
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if isinstance(sep, unicode):
                want_unicode = True
            elif not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        # BUG FIX: end-handling was mis-nested under an "else:" of the sep
        # check in the decompiled output; sep and end are independent.
        end = kwargs.pop("end", None)
        if end is not None:
            if isinstance(end, unicode):
                want_unicode = True
            elif not isinstance(end, str):
                raise TypeError("end must be None or a string")
        if kwargs:
            raise TypeError("invalid keyword arguments to print()")
        if not want_unicode:
            for arg in args:
                if isinstance(arg, unicode):
                    want_unicode = True
                    break

        if want_unicode:
            newline = unicode("\n")
            space = unicode(" ")
        else:
            newline = "\n"
            space = " "
        if sep is None:
            sep = space
        if end is None:
            end = newline
        for i, arg in enumerate(args):
            if i:
                write(sep)
            write(arg)

        write(end)


# BUG FIX: the decompiler attached the two statements below to an "else:" of
# "if print_ is None" and emitted sys.version_info[0[:2]] (slicing the int 0,
# a TypeError at import time).  Upstream six runs them unconditionally with
# sys.version_info[0:2].
_add_doc(reraise, "Reraise an exception.")
if sys.version_info[0:2] < (3, 4):

    def wraps(wrapped):

        def wrapper(f):
            f = functools.wraps(wrapped)(f)
            # Preserve a reference to the wrapped function (PEP 362 style).
            f.__wrapped__ = wrapped
            return f

        return wrapper

else:
    wraps = functools.wraps


def with_metaclass(meta, *bases):
    """Create a base class with a metaclass."""

    class metaclass(meta):

        def __new__(cls, name, this_bases, d):
            return meta(name, bases, d)

    # Dummy class that triggers metaclass.__new__ when subclassed.
    return type.__new__(metaclass, "temporary_class", (), {})


def add_metaclass(metaclass):
    """Class decorator for creating a class with a metaclass."""

    def wrapper(cls):
        orig_vars = cls.__dict__.copy()
        orig_vars.pop("__dict__", None)
        orig_vars.pop("__weakref__", None)
        slots = orig_vars.get("__slots__")
        if slots is not None:
            if isinstance(slots, str):
                slots = [slots]
            for slots_var in slots:
                orig_vars.pop(slots_var)

        return metaclass(cls.__name__, cls.__bases__, orig_vars)

    return wrapper


# Register this module as a package-like entry with a meta-path importer so
# "import six.moves.xyz" works; first drop any importer left over from a
# previous import of this module.
__path__ = []
__package__ = __name__
if globals().get("__spec__") is not None:
    __spec__.submodule_search_locations = []
if sys.meta_path:
    for i, importer in enumerate(sys.meta_path):
        if type(importer).__name__ == "_SixMetaPathImporter" and importer.name == __name__:
            del sys.meta_path[i]
            break

    del i
    del importer
sys.meta_path.append(_importer)
b/APPS_UNCOMPILED/lib/snap7/snap7exceptions.py new file mode 100644 index 0000000..1cd9cd4 --- /dev/null +++ b/APPS_UNCOMPILED/lib/snap7/snap7exceptions.py @@ -0,0 +1,11 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/snap7/snap7exceptions.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 90 bytes + + +class Snap7Exception(Exception): + __doc__ = "\n A Snap7 specific exception.\n " diff --git a/APPS_UNCOMPILED/lib/snap7/snap7types.py b/APPS_UNCOMPILED/lib/snap7/snap7types.py new file mode 100644 index 0000000..b280753 --- /dev/null +++ b/APPS_UNCOMPILED/lib/snap7/snap7types.py @@ -0,0 +1,236 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/snap7/snap7types.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 5812 bytes +""" +Python equivalent for snap7 specific types. 
+""" +import ctypes +from snap7.common import ADict +S7Object = ctypes.c_void_p +buffer_size = 65536 +buffer_type = ctypes.c_ubyte * buffer_size +time_t = ctypes.c_uint64 +word = ctypes.c_uint16 +longword = ctypes.c_uint32 +LocalPort = 1 +RemotePort = 2 +PingTimeout = 3 +SendTimeout = 4 +RecvTimeout = 5 +WorkInterval = 6 +SrcRef = 7 +DstRef = 8 +SrcTSap = 9 +PDURequest = 10 +MaxClients = 11 +BSendTimeout = 12 +BRecvTimeout = 13 +RecoveryTime = 14 +KeepAliveTime = 15 +param_types = ADict({LocalPort: (ctypes.c_uint16), + RemotePort: (ctypes.c_uint16), + PingTimeout: (ctypes.c_int32), + SendTimeout: (ctypes.c_int32), + RecvTimeout: (ctypes.c_int32), + WorkInterval: (ctypes.c_int32), + SrcRef: (ctypes.c_uint16), + DstRef: (ctypes.c_uint16), + SrcTSap: (ctypes.c_uint16), + PDURequest: (ctypes.c_int32), + MaxClients: (ctypes.c_int32), + BSendTimeout: (ctypes.c_int32), + BRecvTimeout: (ctypes.c_int32), + RecoveryTime: (ctypes.c_uint32), + KeepAliveTime: (ctypes.c_uint32)}) +mkEvent = 0 +mkLog = 1 +S7AreaPE = 129 +S7AreaPA = 130 +S7AreaMK = 131 +S7AreaDB = 132 +S7AreaCT = 28 +S7AreaTM = 29 +areas = ADict({ + 'PE': 129, + 'PA': 130, + 'MK': 131, + 'DB': 132, + 'CT': 28, + 'TM': 29}) +S7WLBit = 1 +S7WLByte = 2 +S7WLWord = 4 +S7WLDWord = 6 +S7WLReal = 8 +S7WLCounter = 28 +S7WLTimer = 29 +srvAreaPE = 0 +srvAreaPA = 1 +srvAreaMK = 2 +srvAreaCT = 3 +srvAreaTM = 4 +srvAreaDB = 5 +server_areas = ADict({ + 'PE': 0, + 'PA': 1, + 'MK': 2, + 'CT': 3, + 'TM': 4, + 'DB': 5}) +wordlen_to_ctypes = ADict({S7WLBit: (ctypes.c_int16), + S7WLByte: (ctypes.c_int8), + S7WLWord: (ctypes.c_int16), + S7WLDWord: (ctypes.c_int32), + S7WLReal: (ctypes.c_int32), + S7WLCounter: (ctypes.c_int16), + S7WLTimer: (ctypes.c_int16)}) +block_types = ADict({'OB':(ctypes.c_int)(56), + 'DB':(ctypes.c_int)(65), + 'SDB':(ctypes.c_int)(66), + 'FC':(ctypes.c_int)(67), + 'SFC':(ctypes.c_int)(68), + 'FB':(ctypes.c_int)(69), + 'SFB':(ctypes.c_int)(70)}) +server_statuses = {0:"SrvStopped", + 1:"SrvRunning", + 
2:"SrvError"} +cpu_statuses = {0:"S7CpuStatusUnknown", + 4:"S7CpuStatusStop", + 8:"S7CpuStatusRun"} + +class SrvEvent(ctypes.Structure): + _fields_ = [ + ( + "EvtTime", time_t), + ( + "EvtSender", ctypes.c_int), + ( + "EvtCode", longword), + ( + "EvtRetCode", word), + ( + "EvtParam1", word), + ( + "EvtParam2", word), + ( + "EvtParam3", word), + ( + "EvtParam4", word)] + + def __str__(self): + return "" % ( + self.EvtTime, self.EvtSender, self.EvtCode, + self.EvtRetCode, self.EvtParam1, self.EvtParam2, + self.EvtParam3, self.EvtParam4) + + +class BlocksList(ctypes.Structure): + _fields_ = [ + ( + "OBCount", ctypes.c_int32), + ( + "FBCount", ctypes.c_int32), + ( + "FCCount", ctypes.c_int32), + ( + "SFBCount", ctypes.c_int32), + ( + "SFCCount", ctypes.c_int32), + ( + "DBCount", ctypes.c_int32), + ( + "SDBCount", ctypes.c_int32)] + + def __str__(self): + return "" % ( + self.OBCount, self.FBCount, self.FCCount, + self.SFBCount, self.SFCCount, self.DBCount, + self.SDBCount) + + +class TS7BlockInfo(ctypes.Structure): + _fields_ = [ + ( + "BlkType", ctypes.c_int32), + ( + "BlkNumber", ctypes.c_int32), + ( + "BlkLang", ctypes.c_int32), + ( + "BlkFlags", ctypes.c_int32), + ( + "MC7Size", ctypes.c_int32), + ( + "LoadSize", ctypes.c_int32), + ( + "LocalData", ctypes.c_int32), + ( + "SBBLength", ctypes.c_int32), + ( + "CheckSum", ctypes.c_int32), + ( + "Version", ctypes.c_int32), + ( + "CodeDate", ctypes.c_char * 11), + ( + "IntfDate", ctypes.c_char * 11), + ( + "Author", ctypes.c_char * 9), + ( + "Family", ctypes.c_char * 9), + ( + "Header", ctypes.c_char * 9)] + + def __str__(self): + return " Block type: %s\n Block number: %s\n Block language: %s\n Block flags: %s\n MC7Size: %s\n Load memory size: %s\n Local data: %s\n SBB Length: %s\n Checksum: %s\n Version: %s\n Code date: %s\n Interface date: %s\n Author: %s\n Family: %s\n Header: %s" % (self.BlkType, + self.BlkNumber, + self.BlkLang, + self.BlkFlags, + self.MC7Size, + self.LoadSize, + self.LocalData, + self.SBBLength, + 
self.CheckSum, + self.Version, + self.CodeDate, + self.IntfDate, + self.Author, + self.Family, + self.Header) + + +class S7DataItem(ctypes.Structure): + _pack_ = 1 + _fields_ = [ + ( + "Area", ctypes.c_int32), + ( + "WordLen", ctypes.c_int32), + ( + "Result", ctypes.c_int32), + ( + "DBNumber", ctypes.c_int32), + ( + "Start", ctypes.c_int32), + ( + "Amount", ctypes.c_int32), + ( + "pData", ctypes.POINTER(ctypes.c_uint8))] + + +class S7CpuInfo(ctypes.Structure): + _fields_ = [ + ( + "ModuleTypeName", ctypes.c_char * 33), + ( + "SerialNumber", ctypes.c_char * 25), + ( + "ASName", ctypes.c_char * 25), + ( + "Copyright", ctypes.c_char * 27), + ( + "ModuleName", ctypes.c_char * 25)] diff --git a/APPS_UNCOMPILED/lib/snap7/util.py b/APPS_UNCOMPILED/lib/snap7/util.py new file mode 100644 index 0000000..955acd4 --- /dev/null +++ b/APPS_UNCOMPILED/lib/snap7/util.py @@ -0,0 +1,420 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/snap7/util.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 14968 bytes +''' +This module contains utility functions for working with PLC DB objects. +There are functions to work with the raw bytearray data snap7 functions return +In order to work with this data you need to make python able to work with the +PLC bytearray data. + +For example code see test_util.py and example.py in the example folder. 
+ +example:: + + spec/DB layout + + # Byte index Variable name Datatype + layout=""" + 4 ID INT + 6 NAME STRING[6] + + 12.0 testbool1 BOOL + 12.1 testbool2 BOOL + 12.2 testbool3 BOOL + 12.3 testbool4 BOOL + 12.4 testbool5 BOOL + 12.5 testbool6 BOOL + 12.6 testbool7 BOOL + 12.7 testbool8 BOOL + 13 testReal REAL + 17 testDword DWORD + """ + + client = snap7.client.Client() + client.connect('192.168.200.24', 0, 3) + + # this looks confusing but this means uploading from the PLC to YOU + # so downloading in the PC world :) + + all_data = client.upload(db_number) + + simple: + + db1 = snap7.util.DB( + db_number, # the db we use + all_data, # bytearray from the plc + layout, # layout specification DB variable data + # A DB specification is the specification of a + # DB object in the PLC you can find it using + # the dataview option on a DB object in PCS7 + + 17+2, # size of the specification 17 is start + # of last value + # which is a DWORD which is 2 bytes, + + 1, # number of row's / specifications + + id_field='ID', # field we can use to identify a row. + # default index is used + layout_offset=4, # sometimes specification does not start a 0 + # like in our example + db_offset=0 # At which point in 'all_data' should we start + # reading. if could be that the specification + # does not start at 0 + ) + + Now we can use db1 in python as a dict. if 'ID' contains + the 'test' we can identify the 'test' row in the all_data bytearray + + To test of you layout matches the data from the plc you can + just print db1[0] or db['test'] in the example + + db1['test']['testbool1'] = 0 + + If we do not specify a id_field this should work to read out the + same data. + + db1[0]['testbool1'] + + to read and write a single Row from the plc. takes like 5ms! 
def get_int(bytearray_, byte_index):
    """
    Get a signed 16-bit big-endian int from *bytearray_*.

    :param bytearray_: buffer read from the PLC
    :param byte_index: offset of the first of the two bytes
    :returns: int in range -32768..32767

    BUG FIX: the decompiled source sliced with
    ``bytearray_[byte_index[:byte_index + 2]]`` -- subscripting the int
    ``byte_index`` raises TypeError.  Restored the intended slice
    ``byte_index:byte_index + 2``.
    """
    data = bytearray_[byte_index:byte_index + 2]
    # masking kept for parity with the original per-byte handling
    packed = struct.pack("2B", data[0] & 255, data[1] & 255)
    return struct.unpack(">h", packed)[0]
def get_string(_bytearray, byte_index, max_size):
    """
    Parse an S7 STRING from *_bytearray*.

    S7 STRING layout: byte 0 = declared maximum length, byte 1 = actual
    length, bytes 2.. = character data.

    :param max_size: maximum size from the DB specification; a larger
        actual length is clamped (and logged) so we never read past the
        declared field.
    :returns: decoded python str

    BUG FIX: the decompiled source sliced with
    ``_bytearray[(byte_index + 2)[:byte_index + 2 + size]]`` --
    subscripting an int raises TypeError.  Restored the intended slice.
    """
    size = _bytearray[byte_index + 1]
    if max_size < size:
        logger.error("the string is too big for the size encountered in specification")
        logger.error("WRONG SIZED STRING ENCOUNTERED")
        size = max_size
    data = map(chr, _bytearray[byte_index + 2:byte_index + 2 + size])
    return "".join(data)
DB(object): + __doc__ = '\n Manage a DB bytearray block given a specification\n of the Layout.\n\n It is possible to have many repetitive instances of\n a specification this is called a "row".\n\n probably most usecases there is just one row\n\n db1[0][\'testbool1\'] = test\n db1.write() # puts data in plc\n ' + _bytearray = None + specification = None + row_size = None + layout_offset = None + db_offset = None + + def __init__(self, db_number, _bytearray, specification, row_size, size, id_field=None, db_offset=0, layout_offset=0, row_offset=0): + self.db_number = db_number + self.size = size + self.row_size = row_size + self.id_field = id_field + self.db_offset = db_offset + self.layout_offset = layout_offset + self.row_offset = row_offset + self._bytearray = _bytearray + self.specification = specification + self.index = OrderedDict() + self.make_rows() + + def make_rows(self): + id_field = self.id_field + row_size = self.row_size + specification = self.specification + layout_offset = self.layout_offset + for i in range(self.size): + db_offset = i * row_size + self.db_offset + row = DB_Row(self, specification, + row_size=row_size, + db_offset=db_offset, + layout_offset=layout_offset, + row_offset=(self.row_offset)) + key = row[id_field] if id_field else i + if key: + if key in self.index: + msg = "%s not unique!" 
% key + logger.error(msg) + self.index[key] = row + + def __getitem__(self, key, default=None): + return self.index.get(key, default) + + def __iter__(self): + for key, row in self.index.items(): + yield (key, row) + + def __len__(self): + return len(self.index) + + def set_data(self, _bytearray): + assert isinstance(_bytearray, bytearray) + self._bytearray = _bytearray + + +class DB_Row(object): + __doc__ = "\n Provide ROW API for DB bytearray\n " + _bytearray = None + _specification = None + + def __init__(self, _bytearray, _specification, row_size=0, db_offset=0, layout_offset=0, row_offset=0): + self.db_offset = db_offset + self.layout_offset = layout_offset + self.row_size = row_size + self.row_offset = row_offset + assert isinstance(_bytearray, (bytearray, DB)) + self._bytearray = _bytearray + self._specification = parse_specification(_specification) + + def get_bytearray(self): + """ + return bytearray from self or DB parent + """ + if isinstance(self._bytearray, DB): + return self._bytearray._bytearray + return self._bytearray + + def export(self): + """ + export dictionary with values + """ + data = {} + for key in self._specification: + data[key] = self[key] + + return data + + def __getitem__(self, key): + """ + Get a specific db field + """ + assert key in self._specification + index, _type = self._specification[key] + return self.get_value(index, _type) + + def __setitem__(self, key, value): + assert key in self._specification + index, _type = self._specification[key] + self.set_value(index, _type, value) + + def __repr__(self): + string = "" + for var_name, (index, _type) in self._specification.items(): + string = "%s\n%-20s %-10s" % (string, var_name, + self.get_value(index, _type)) + + return string + + def unchanged(self, _bytearray): + if self.get_bytearray() == _bytearray: + return True + return False + + def get_offset(self, byte_index): + """ + Calculate correct beginning position for a row + the db_offset = row_size * index + """ + return 
int(byte_index) - self.layout_offset + self.db_offset + + def get_value(self, byte_index, _type): + _bytearray = self.get_bytearray() + if _type == "BOOL": + byte_index, bool_index = byte_index.split(".") + return get_bool(_bytearray, self.get_offset(byte_index), int(bool_index)) + byte_index = self.get_offset(byte_index) + if _type.startswith("STRING"): + max_size = re.search("\\d+", _type).group(0) + max_size = int(max_size) + return get_string(_bytearray, byte_index, max_size) + if _type == "REAL": + return get_real(_bytearray, byte_index) + if _type == "DWORD": + return get_dword(_bytearray, byte_index) + if _type == "INT": + return get_int(_bytearray, byte_index) + raise ValueError + + def set_value(self, byte_index, _type, value): + _bytearray = self.get_bytearray() + if _type == "BOOL": + byte_index, bool_index = byte_index.split(".") + return set_bool(_bytearray, self.get_offset(byte_index), int(bool_index), value) + byte_index = self.get_offset(byte_index) + if _type.startswith("STRING"): + max_size = re.search("\\d+", _type).group(0) + max_size = int(max_size) + return set_string(_bytearray, byte_index, value, max_size) + if _type == "REAL": + return set_real(_bytearray, byte_index, value) + if _type == "DWORD": + return set_dword(_bytearray, byte_index, value) + if _type == "INT": + return set_int(_bytearray, byte_index, value) + raise ValueError + + def write(self, client): + """ + Write current data to db in plc + """ + assert isinstance(self._bytearray, DB) + assert self.row_size >= 0 + db_nr = self._bytearray.db_number + offset = self.db_offset + data = self.get_bytearray()[offset[:offset + self.row_size]] + db_offset = self.db_offset + if self.row_offset: + data = data[self.row_offset[:None]] + db_offset += self.row_offset + client.db_write(db_nr, db_offset, data) + + def read(self, client): + """ + read current data of db row from plc + """ + assert isinstance(self._bytearray, DB) + assert self.row_size >= 0 + db_nr = self._bytearray.db_number + 
def set_self_blocking(function):
    """Decorator: temporarily put a non-blocking socket (timeout 0)
    into blocking mode for the duration of *function*, restoring the
    non-blocking state afterwards.

    BUG FIX: in the decompiled version ``_is_blocking`` was assigned
    inside the ``try`` whose ``finally`` reads it -- if ``gettimeout()``
    raised, the ``finally`` clause itself raised UnboundLocalError and
    masked the real error.  The timeout is now read before the try.
    The decompiler's ``e = None; del e`` re-raise scaffold is removed;
    exceptions simply propagate.
    """

    @functools.wraps(function)
    def wrapper(*args, **kwargs):
        self = args[0]
        _is_blocking = self.gettimeout()
        try:
            if _is_blocking == 0:
                self.setblocking(True)
            return function(*args, **kwargs)
        finally:
            if _is_blocking == 0:
                self.setblocking(False)

    return wrapper
# Mapping of SOCKS5 reply codes (RFC 1928) to human-readable messages.
# BUG FIX: the decompiled dict wrapped every message in literal double
# quotes ('"Network unreachable"'), so raised SOCKS5Error text read
# like `0x03: "Network unreachable"` with stray quotes.  Restored the
# upstream PySocks messages.
SOCKS5_ERRORS = {
    1: "General SOCKS server failure",
    2: "Connection not allowed by ruleset",
    3: "Network unreachable",
    4: "Host unreachable",
    5: "Connection refused",
    6: "TTL expired",
    7: "Command not supported, or protocol error",
    8: "Address type not supported"}
This will + only work on modules that import socket directly into the namespace; + most of the Python Standard Library falls into this category.""" + if socksocket.default_proxy: + module.socket.socket = socksocket + else: + raise GeneralProxyError("No default proxy specified") + + +wrapmodule = wrap_module + +def create_connection(dest_pair, timeout=None, source_address=None, proxy_type=None, proxy_addr=None, proxy_port=None, proxy_rdns=True, proxy_username=None, proxy_password=None, socket_options=None): + """create_connection(dest_pair, *[, timeout], **proxy_args) -> socket object + + Like socket.create_connection(), but connects to proxy + before returning the socket object. + + dest_pair - 2-tuple of (IP/hostname, port). + **proxy_args - Same args passed to socksocket.set_proxy() if present. + timeout - Optional socket timeout value, in seconds. + source_address - tuple (host, port) for the socket to bind to as its source + address before connecting (only for compatibility) + """ + remote_host, remote_port = dest_pair + if remote_host.startswith("["): + remote_host = remote_host.strip("[]") + if proxy_addr: + if proxy_addr.startswith("["): + proxy_addr = proxy_addr.strip("[]") + err = None + for r in socket.getaddrinfo(proxy_addr, proxy_port, 0, socket.SOCK_STREAM): + family, socket_type, proto, canonname, sa = r + sock = None + try: + sock = socksocket(family, socket_type, proto) + if socket_options: + for opt in socket_options: + (sock.setsockopt)(*opt) + + if isinstance(timeout, (int, float)): + sock.settimeout(timeout) + if proxy_type: + sock.set_proxy(proxy_type, proxy_addr, proxy_port, proxy_rdns, proxy_username, proxy_password) + if source_address: + sock.bind(source_address) + sock.connect((remote_host, remote_port)) + return sock + except (socket.error, ProxyError) as e: + try: + err = e + if sock: + sock.close() + sock = None + finally: + e = None + del e + + if err: + raise err + raise socket.error("gai returned empty list.") + + +class 
_BaseSocket(socket.socket): + __doc__ = "Allows Python 2 delegated methods such as send() to be overridden." + + def __init__(self, *pos, **kw): + (_orig_socket.__init__)(self, *pos, **kw) + self._savedmethods = dict() + for name in self._savenames: + self._savedmethods[name] = getattr(self, name) + delattr(self, name) + + _savenames = list() + + +def _makemethod(name): + return (lambda self, *pos, **kw: (self._savedmethods[name])(*pos, **kw)) + + +for name in ('sendto', 'send', 'recvfrom', 'recv'): + method = getattr(_BaseSocket, name, None) + if not isinstance(method, Callable): + _BaseSocket._savenames.append(name) + setattr(_BaseSocket, name, _makemethod(name)) + +class socksocket(_BaseSocket): + __doc__ = 'socksocket([family[, type[, proto]]]) -> socket object\n\n Open a SOCKS enabled socket. The parameters are the same as\n those of the standard socket init. In order for SOCKS to work,\n you must specify family=AF_INET and proto=0.\n The "type" argument must be either SOCK_STREAM or SOCK_DGRAM.\n ' + default_proxy = None + + def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, *args, **kwargs): + if type not in (socket.SOCK_STREAM, socket.SOCK_DGRAM): + msg = "Socket type must be stream or datagram, not {!r}" + raise ValueError(msg.format(type)) + else: + (super(socksocket, self).__init__)(family, type, proto, *args, **kwargs) + self._proxyconn = None + if self.default_proxy: + self.proxy = self.default_proxy + else: + self.proxy = (None, None, None, None, None, None) + self.proxy_sockname = None + self.proxy_peername = None + self._timeout = None + + def _readall(self, file, count): + """Receive EXACTLY the number of bytes requested from the file object. 
+ + Blocks until the required number of bytes have been received.""" + data = b'' + while len(data) < count: + d = file.read(count - len(data)) + if not d: + raise GeneralProxyError("Connection closed unexpectedly") + data += d + + return data + + def settimeout(self, timeout): + self._timeout = timeout + try: + peer = self.get_proxy_peername() + super(socksocket, self).settimeout(self._timeout) + except socket.error: + pass + + def gettimeout(self): + return self._timeout + + def setblocking(self, v): + if v: + self.settimeout(None) + else: + self.settimeout(0.0) + + def set_proxy(self, proxy_type=None, addr=None, port=None, rdns=True, username=None, password=None): + """ Sets the proxy to be used. + + proxy_type - The type of the proxy to be used. Three types + are supported: PROXY_TYPE_SOCKS4 (including socks4a), + PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP + addr - The address of the server (IP or DNS). + port - The port of the server. Defaults to 1080 for SOCKS + servers and 8080 for HTTP proxy servers. + rdns - Should DNS queries be performed on the remote side + (rather than the local side). The default is True. + Note: This has no effect with SOCKS4 servers. + username - Username to authenticate with to the server. + The default is no authentication. + password - Password to authenticate with to the server. + Only relevant when username is also provided.""" + self.proxy = ( + proxy_type, addr, port, rdns, + username.encode() if username else None, + password.encode() if password else None) + + def setproxy(self, *args, **kwargs): + if "proxytype" in kwargs: + kwargs["proxy_type"] = kwargs.pop("proxytype") + return (self.set_proxy)(*args, **kwargs) + + def bind(self, *pos, **kw): + """Implements proxy connection for UDP sockets. 
+ + Happens during the bind() phase.""" + proxy_type, proxy_addr, proxy_port, rdns, username, password = self.proxy + if not proxy_type or self.type != socket.SOCK_DGRAM: + return (_orig_socket.bind)(self, *pos, **kw) + if self._proxyconn: + raise socket.error(EINVAL, "Socket already bound to an address") + if proxy_type != SOCKS5: + msg = "UDP only supported by SOCKS5 proxy type" + raise socket.error(EOPNOTSUPP, msg) + (super(socksocket, self).bind)(*pos, **kw) + _, port = self.getsockname() + dst = ("0", port) + self._proxyconn = _orig_socket() + proxy = self._proxy_addr() + self._proxyconn.connect(proxy) + UDP_ASSOCIATE = b'\x03' + _, relay = self._SOCKS5_request(self._proxyconn, UDP_ASSOCIATE, dst) + host, _ = proxy + _, port = relay + super(socksocket, self).connect((host, port)) + super(socksocket, self).settimeout(self._timeout) + self.proxy_sockname = ('0.0.0.0', 0) + + def sendto(self, bytes, *args, **kwargs): + if self.type != socket.SOCK_DGRAM: + return (super(socksocket, self).sendto)(bytes, *args, **kwargs) + if not self._proxyconn: + self.bind(('', 0)) + address = args[-1] + flags = args[None[:-1]] + header = BytesIO() + RSV = b'\x00\x00' + header.write(RSV) + STANDALONE = b'\x00' + header.write(STANDALONE) + self._write_SOCKS5_address(address, header) + sent = (super(socksocket, self).send)(header.getvalue() + bytes, *flags, **kwargs) + return sent - header.tell() + + def send(self, bytes, flags=0, **kwargs): + if self.type == socket.SOCK_DGRAM: + return (self.sendto)(bytes, flags, (self.proxy_peername), **kwargs) + return (super(socksocket, self).send)(bytes, flags, **kwargs) + + def recvfrom(self, bufsize, flags=0): + if self.type != socket.SOCK_DGRAM: + return super(socksocket, self).recvfrom(bufsize, flags) + elif not self._proxyconn: + self.bind(('', 0)) + buf = BytesIO(super(socksocket, self).recv(bufsize + 1024, flags)) + buf.seek(2, SEEK_CUR) + frag = buf.read(1) + if ord(frag): + raise NotImplementedError("Received UDP packet fragment") + 
fromhost, fromport = self._read_SOCKS5_address(buf) + if self.proxy_peername: + peerhost, peerport = self.proxy_peername + if fromhost != peerhost or peerport not in (0, fromport): + raise socket.error(EAGAIN, "Packet filtered") + return ( + buf.read(bufsize), (fromhost, fromport)) + + def recv(self, *pos, **kw): + bytes, _ = (self.recvfrom)(*pos, **kw) + return bytes + + def close(self): + if self._proxyconn: + self._proxyconn.close() + return super(socksocket, self).close() + + def get_proxy_sockname(self): + """Returns the bound IP address and port number at the proxy.""" + return self.proxy_sockname + + getproxysockname = get_proxy_sockname + + def get_proxy_peername(self): + """ + Returns the IP and port number of the proxy. + """ + return self.getpeername() + + getproxypeername = get_proxy_peername + + def get_peername(self): + """Returns the IP address and port number of the destination machine. + + Note: get_proxy_peername returns the proxy.""" + return self.proxy_peername + + getpeername = get_peername + + def _negotiate_SOCKS5(self, *dest_addr): + """Negotiates a stream connection through a SOCKS5 server.""" + CONNECT = b'\x01' + self.proxy_peername, self.proxy_sockname = self._SOCKS5_request(self, CONNECT, dest_addr) + + def _SOCKS5_request(self, conn, cmd, dst): + """ + Send SOCKS5 request with given command (CMD field) and + address (DST field). Returns resolved DST address that was used. + """ + proxy_type, addr, port, rdns, username, password = self.proxy + writer = conn.makefile("wb") + reader = conn.makefile("rb", 0) + try: + if username: + if password: + writer.write(b'\x05\x02\x00\x02') + else: + writer.write(b'\x05\x01\x00') + writer.flush() + chosen_auth = self._readall(reader, 2) + if chosen_auth[0[:1]] != b'\x05': + raise GeneralProxyError("SOCKS5 proxy server sent invalid data") + if chosen_auth[1[:2]] == b'\x02': + if not (username and password): + raise SOCKS5AuthError("No username/password supplied. 
Server requested username/password authentication") + writer.write(b'\x01' + chr(len(username)).encode() + username + chr(len(password)).encode() + password) + writer.flush() + auth_status = self._readall(reader, 2) + if auth_status[0[:1]] != b'\x01': + raise GeneralProxyError("SOCKS5 proxy server sent invalid data") + if auth_status[1[:2]] != b'\x00': + raise SOCKS5AuthError("SOCKS5 authentication failed") + elif chosen_auth[1[:2]] != b'\x00': + if chosen_auth[1[:2]] == b'\xff': + raise SOCKS5AuthError("All offered SOCKS5 authentication methods were rejected") + else: + raise GeneralProxyError("SOCKS5 proxy server sent invalid data") + writer.write(b'\x05' + cmd + b'\x00') + resolved = self._write_SOCKS5_address(dst, writer) + writer.flush() + resp = self._readall(reader, 3) + if resp[0[:1]] != b'\x05': + raise GeneralProxyError("SOCKS5 proxy server sent invalid data") + status = ord(resp[1[:2]]) + if status != 0: + error = SOCKS5_ERRORS.get(status, "Unknown error") + raise SOCKS5Error("{:#04x}: {}".format(status, error)) + bnd = self._read_SOCKS5_address(reader) + super(socksocket, self).settimeout(self._timeout) + return (resolved, bnd) + finally: + reader.close() + writer.close() + + def _write_SOCKS5_address(self, addr, file): + """ + Return the host and port packed for the SOCKS5 protocol, + and the resolved address as a tuple object. 
+ """ + host, port = addr + proxy_type, _, _, rdns, username, password = self.proxy + family_to_byte = {(socket.AF_INET): b'\x01', (socket.AF_INET6): b'\x04'} + for family in (socket.AF_INET, socket.AF_INET6): + try: + addr_bytes = socket.inet_pton(family, host) + file.write(family_to_byte[family] + addr_bytes) + host = socket.inet_ntop(family, addr_bytes) + file.write(struct.pack(">H", port)) + return (host, port) + except socket.error: + continue + + if rdns: + host_bytes = host.encode("idna") + file.write(b'\x03' + chr(len(host_bytes)).encode() + host_bytes) + else: + addresses = socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM, socket.IPPROTO_TCP, socket.AI_ADDRCONFIG) + target_addr = addresses[0] + family = target_addr[0] + host = target_addr[4][0] + addr_bytes = socket.inet_pton(family, host) + file.write(family_to_byte[family] + addr_bytes) + host = socket.inet_ntop(family, addr_bytes) + file.write(struct.pack(">H", port)) + return (host, port) + + def _read_SOCKS5_address(self, file): + atyp = self._readall(file, 1) + if atyp == b'\x01': + addr = socket.inet_ntoa(self._readall(file, 4)) + else: + if atyp == b'\x03': + length = self._readall(file, 1) + addr = self._readall(file, ord(length)) + else: + if atyp == b'\x04': + addr = socket.inet_ntop(socket.AF_INET6, self._readall(file, 16)) + else: + raise GeneralProxyError("SOCKS5 proxy server sent invalid data") + port = struct.unpack(">H", self._readall(file, 2))[0] + return (addr, port) + + def _negotiate_SOCKS4(self, dest_addr, dest_port): + """Negotiates a connection through a SOCKS4 server.""" + proxy_type, addr, port, rdns, username, password = self.proxy + writer = self.makefile("wb") + reader = self.makefile("rb", 0) + try: + remote_resolve = False + try: + addr_bytes = socket.inet_aton(dest_addr) + except socket.error: + if rdns: + addr_bytes = b'\x00\x00\x00\x01' + remote_resolve = True + else: + addr_bytes = socket.inet_aton(socket.gethostbyname(dest_addr)) + + 
writer.write(struct.pack(">BBH", 4, 1, dest_port)) + writer.write(addr_bytes) + if username: + writer.write(username) + else: + writer.write(b'\x00') + if remote_resolve: + writer.write(dest_addr.encode("idna") + b'\x00') + writer.flush() + resp = self._readall(reader, 8) + if resp[0[:1]] != b'\x00': + raise GeneralProxyError("SOCKS4 proxy server sent invalid data") + status = ord(resp[1[:2]]) + if status != 90: + error = SOCKS4_ERRORS.get(status, "Unknown error") + raise SOCKS4Error("{:#04x}: {}".format(status, error)) + self.proxy_sockname = ( + socket.inet_ntoa(resp[4[:None]]), + struct.unpack(">H", resp[2[:4]])[0]) + if remote_resolve: + self.proxy_peername = ( + socket.inet_ntoa(addr_bytes), dest_port) + else: + self.proxy_peername = ( + dest_addr, dest_port) + finally: + reader.close() + writer.close() + + def _negotiate_HTTP(self, dest_addr, dest_port): + """Negotiates a connection through an HTTP server. + + NOTE: This currently only supports HTTP CONNECT-style proxies.""" + proxy_type, addr, port, rdns, username, password = self.proxy + addr = dest_addr if rdns else socket.gethostbyname(dest_addr) + http_headers = [ + b'CONNECT ' + addr.encode("idna") + b':' + str(dest_port).encode() + b' HTTP/1.1', + b'Host: ' + dest_addr.encode("idna")] + if username: + if password: + http_headers.append(b'Proxy-Authorization: basic ' + b64encode(username + b':' + password)) + http_headers.append(b'\r\n') + self.sendall((b'\r\n').join(http_headers)) + fobj = self.makefile() + status_line = fobj.readline() + fobj.close() + if not status_line: + raise GeneralProxyError("Connection closed unexpectedly") + try: + proto, status_code, status_msg = status_line.split(" ", 2) + except ValueError: + raise GeneralProxyError("HTTP proxy server sent invalid response") + + if not proto.startswith("HTTP/"): + raise GeneralProxyError("Proxy server does not appear to be an HTTP proxy") + try: + status_code = int(status_code) + except ValueError: + raise HTTPError("HTTP proxy server did 
not return a valid HTTP status") + + if status_code != 200: + error = "{}: {}".format(status_code, status_msg) + if status_code in (400, 403, 405): + error += "\n[*] Note: The HTTP proxy server may not be supported by PySocks (must be a CONNECT tunnel proxy)" + raise HTTPError(error) + self.proxy_sockname = (b'0.0.0.0', 0) + self.proxy_peername = (addr, dest_port) + + _proxy_negotiators = {SOCKS4: _negotiate_SOCKS4, + SOCKS5: _negotiate_SOCKS5, + HTTP: _negotiate_HTTP} + + @set_self_blocking + def connect(self, dest_pair, catch_errors=None): + """ + Connects to the specified destination through a proxy. + Uses the same API as socket's connect(). + To select the proxy server, use set_proxy(). + + dest_pair - 2-tuple of (IP/hostname, port). + """ + if not len(dest_pair) != 2: + if dest_pair[0].startswith("["): + raise socket.error("PySocks doesn't support IPv6: %s" % str(dest_pair)) + dest_addr, dest_port = dest_pair + if self.type == socket.SOCK_DGRAM: + if not self._proxyconn: + self.bind(('', 0)) + dest_addr = socket.gethostbyname(dest_addr) + if dest_addr == "0.0.0.0": + self.proxy_peername = dest_port or None + else: + self.proxy_peername = ( + dest_addr, dest_port) + return + proxy_type, proxy_addr, proxy_port, rdns, username, password = self.proxy + if isinstance(dest_pair, (list, tuple)) and not len(dest_pair) != 2: + if not (dest_addr and isinstance(dest_port, int)): + raise GeneralProxyError("Invalid destination-connection (host, port) pair") + super(socksocket, self).settimeout(self._timeout) + if proxy_type is None: + self.proxy_peername = dest_pair + super(socksocket, self).settimeout(self._timeout) + super(socksocket, self).connect((dest_addr, dest_port)) + return + else: + proxy_addr = self._proxy_addr() + try: + super(socksocket, self).connect(proxy_addr) + except socket.error as error: + try: + self.close() + if not catch_errors: + proxy_addr, proxy_port = proxy_addr + proxy_server = "{}:{}".format(proxy_addr, proxy_port) + printable_type = 
PRINTABLE_PROXY_TYPES[proxy_type] + msg = "Error connecting to {} proxy {}".format(printable_type, proxy_server) + log.debug("%s due to: %s", msg, error) + raise ProxyConnectionError(msg, error) + else: + raise error + finally: + error = None + del error + + else: + try: + negotiate = self._proxy_negotiators[proxy_type] + negotiate(self, dest_addr, dest_port) + except socket.error as error: + try: + if not catch_errors: + self.close() + raise GeneralProxyError("Socket error", error) + else: + raise error + finally: + error = None + del error + + except ProxyError: + self.close() + raise + + @set_self_blocking + def connect_ex(self, dest_pair): + """ https://docs.python.org/3/library/socket.html#socket.socket.connect_ex + Like connect(address), but return an error indicator instead of raising an exception for errors returned by the C-level connect() call (other problems, such as "host not found" can still raise exceptions). + """ + try: + self.connect(dest_pair, catch_errors=True) + return 0 + except OSError as e: + try: + if e.errno: + return e.errno + raise + finally: + e = None + del e + + def _proxy_addr(self): + """ + Return proxy address to connect to as tuple object + """ + proxy_type, proxy_addr, proxy_port, rdns, username, password = self.proxy + proxy_port = proxy_port or DEFAULT_PORTS.get(proxy_type) + if not proxy_port: + raise GeneralProxyError("Invalid proxy type") + return ( + proxy_addr, proxy_port) diff --git a/APPS_UNCOMPILED/lib/sockshandler.py b/APPS_UNCOMPILED/lib/sockshandler.py new file mode 100644 index 0000000..64d0912 --- /dev/null +++ b/APPS_UNCOMPILED/lib/sockshandler.py @@ -0,0 +1,133 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/sockshandler.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 3966 bytes +""" +SocksiPy + urllib2 
handler + +version: 0.3 +author: e + +This module provides a Handler which you can use with urllib2 to allow it to tunnel your connection through a socks.sockssocket socket, with out monkey patching the original socket... +""" +import socket, ssl +try: + import urllib2, httplib +except ImportError: + import urllib.request as urllib2 + import http.client as httplib + +import socks + +def merge_dict(a, b): + d = a.copy() + d.update(b) + return d + + +def is_ip(s): + try: + if ":" in s: + socket.inet_pton(socket.AF_INET6, s) + else: + if "." in s: + socket.inet_aton(s) + else: + return False + except: + return False + return True + + +socks4_no_rdns = set() + +class SocksiPyConnection(httplib.HTTPConnection): + + def __init__(self, proxytype, proxyaddr, proxyport=None, rdns=True, username=None, password=None, *args, **kwargs): + self.proxyargs = ( + proxytype, proxyaddr, proxyport, rdns, username, password) + (httplib.HTTPConnection.__init__)(self, *args, **kwargs) + + def connect(self): + proxytype, proxyaddr, proxyport, rdns, username, password = self.proxyargs + rdns = rdns and proxyaddr not in socks4_no_rdns + while True: + try: + sock = socks.create_connection(( + self.host, self.port), self.timeout, None, proxytype, proxyaddr, proxyport, rdns, username, password, ( + ( + socket.IPPROTO_TCP, socket.TCP_NODELAY, 1),)) + break + except socks.SOCKS4Error as e: + try: + if rdns and "0x5b" in str(e): + rdns = is_ip(self.host) or False + socks4_no_rdns.add(proxyaddr) + else: + raise + finally: + e = None + del e + + self.sock = sock + + +class SocksiPyConnectionS(httplib.HTTPSConnection): + + def __init__(self, proxytype, proxyaddr, proxyport=None, rdns=True, username=None, password=None, *args, **kwargs): + self.proxyargs = ( + proxytype, proxyaddr, proxyport, rdns, username, password) + (httplib.HTTPSConnection.__init__)(self, *args, **kwargs) + + def connect(self): + SocksiPyConnection.connect(self) + self.sock = self._context.wrap_socket((self.sock), 
server_hostname=(self.host)) + if not self._context.check_hostname: + if self._check_hostname: + try: + ssl.match_hostname(self.sock.getpeercert(), self.host) + except Exception: + self.sock.shutdown(socket.SHUT_RDWR) + self.sock.close() + raise + + +class SocksiPyHandler(urllib2.HTTPHandler, urllib2.HTTPSHandler): + + def __init__(self, *args, **kwargs): + self.args = args + self.kw = kwargs + urllib2.HTTPHandler.__init__(self) + + def http_open(self, req): + + def build(host, port=None, timeout=0, **kwargs): + kw = merge_dict(self.kw, kwargs) + conn = SocksiPyConnection(self.args, host=host, port=port, timeout=timeout, **kw) + return conn + + return self.do_open(build, req) + + def https_open(self, req): + + def build(host, port=None, timeout=0, **kwargs): + kw = merge_dict(self.kw, kwargs) + conn = SocksiPyConnectionS(self.args, host=host, port=port, timeout=timeout, **kw) + return conn + + return self.do_open(build, req) + + +if __name__ == "__main__": + import sys + try: + port = int(sys.argv[1]) + except (ValueError, IndexError): + port = 9050 + + opener = urllib2.build_opener(SocksiPyHandler(socks.PROXY_TYPE_SOCKS5, "localhost", port)) + print("HTTP: " + opener.open("http://httpbin.org/ip").read().decode()) + print("HTTPS: " + opener.open("https://httpbin.org/ip").read().decode()) diff --git a/APPS_UNCOMPILED/lib/sparkPlugB_pb2.py b/APPS_UNCOMPILED/lib/sparkPlugB_pb2.py new file mode 100644 index 0000000..8f69244 --- /dev/null +++ b/APPS_UNCOMPILED/lib/sparkPlugB_pb2.py @@ -0,0 +1,1790 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/sparkPlugB_pb2.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 61377 bytes +import sys +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as 
_descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +_sym_db = _symbol_database.Default() +DESCRIPTOR = _descriptor.FileDescriptor(name="SparkPlugB.proto", + package="", + syntax="proto2", + serialized_options=None, + serialized_pb=(_b('\n\x10SparkPlugB.proto"£\x14\n\x07Payload\x12\x11\n\ttimestamp\x18\x01 \x01(\x04\x12 \n\x07metrics\x18\x02 \x03(\x0b2\x0f.Payload.Metric\x12\x0b\n\x03seq\x18\x03 \x01(\x04\x12\r\n\x05bdseq\x18\x04 \x01(\x04\x12\x0c\n\x04uuid\x18\x05 \x01(\t\x12\x0c\n\x04body\x18\x06 \x01(\x0c\x1a\x85\x04\n\x08Template\x12\x0f\n\x07version\x18\x01 \x01(\t\x12 \n\x07metrics\x18\x02 \x03(\x0b2\x0f.Payload.Metric\x12/\n\nparameters\x18\x03 \x03(\x0b2\x1b.Payload.Template.Parameter\x12\x14\n\x0ctemplate_ref\x18\x04 \x01(\t\x12\x15\n\ris_definition\x18\x05 \x01(\x08\x1aÝ\x02\n\tParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\r\x12\x13\n\tint_value\x18\x03 \x01(\x05H\x00\x12\x14\n\nlong_value\x18\x04 \x01(\x03H\x00\x12\x15\n\x0bfloat_value\x18\x05 \x01(\x02H\x00\x12\x16\n\x0cdouble_value\x18\x06 \x01(\x01H\x00\x12\x17\n\rboolean_value\x18\x07 \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x08 \x01(\tH\x00\x12\x14\n\nuint_value\x18\t \x01(\rH\x00\x12\x15\n\x0bulong_value\x18\n \x01(\x04H\x00\x12N\n\x0fextension_value\x18\x0b \x01(\x0b23.Payload.Template.Parameter.ParameterValueExtensionH\x00\x1a#\n\x17ParameterValueExtension*\x08\x08\x01\x10\x80\x80\x80\x80\x02B\x07\n\x05value*\x08\x08\x06\x10\x80\x80\x80\x80\x02\x1aö\x03\n\x07DataSet\x12\x16\n\x0enum_of_columns\x18\x01 \x01(\x04\x12\x0f\n\x07columns\x18\x02 \x03(\t\x12\r\n\x05types\x18\x03 \x03(\r\x12"\n\x04rows\x18\x04 \x03(\x0b2\x14.Payload.DataSet.Row\x1aÂ\x02\n\x0cDataSetValue\x12\x13\n\tint_value\x18\x01 \x01(\x05H\x00\x12\x14\n\nlong_value\x18\x02 \x01(\x03H\x00\x12\x15\n\x0bfloat_value\x18\x03 \x01(\x02H\x00\x12\x16\n\x0cdouble_value\x18\x04 
\x01(\x01H\x00\x12\x17\n\rboolean_value\x18\x05 \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x06 \x01(\tH\x00\x12\x14\n\nuint_value\x18\x07 \x01(\rH\x00\x12\x15\n\x0bulong_value\x18\x08 \x01(\x04H\x00\x12N\n\x0fextension_value\x18\t \x01(\x0b23.Payload.DataSet.DataSetValue.DataSetValueExtensionH\x00\x1a!\n\x15DataSetValueExtension*\x08\x08\x01\x10\x80\x80\x80\x80\x02B\x07\n\x05value\x1a@\n\x03Row\x12/\n\x08elements\x18\x01 \x03(\x0b2\x1d.Payload.DataSet.DataSetValue*\x08\x08\x02\x10\x80\x80\x80\x80\x02*\x08\x08\x05\x10\x80\x80\x80\x80\x02\x1aÈ\x03\n\rPropertyValue\x12\x0c\n\x04type\x18\x01 \x01(\r\x12\x0f\n\x07is_null\x18\x02 \x01(\x08\x12\x13\n\tint_value\x18\x03 \x01(\x05H\x00\x12\x14\n\nlong_value\x18\x04 \x01(\x03H\x00\x12\x15\n\x0bfloat_value\x18\x05 \x01(\x02H\x00\x12\x16\n\x0cdouble_value\x18\x06 \x01(\x01H\x00\x12\x17\n\rboolean_value\x18\x07 \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x08 \x01(\tH\x00\x121\n\x11propertyset_value\x18\t \x01(\x0b2\x14.Payload.PropertySetH\x00\x126\n\x12propertysets_value\x18\n \x01(\x0b2\x18.Payload.PropertySetListH\x00\x12\x14\n\nuint_value\x18\x0b \x01(\rH\x00\x12\x15\n\x0bulong_value\x18\x0c \x01(\x04H\x00\x12H\n\x0fextension_value\x18\r \x01(\x0b2-.Payload.PropertyValue.PropertyValueExtensionH\x00\x1a"\n\x16PropertyValueExtension*\x08\x08\x01\x10\x80\x80\x80\x80\x02B\x07\n\x05value\x1aM\n\x0bPropertySet\x12\x0c\n\x04keys\x18\x01 \x03(\t\x12&\n\x06values\x18\x02 \x03(\x0b2\x16.Payload.PropertyValue*\x08\x08\x03\x10\x80\x80\x80\x80\x02\x1aF\n\x0fPropertySetList\x12)\n\x0bpropertyset\x18\x01 \x03(\x0b2\x14.Payload.PropertySet*\x08\x08\x02\x10\x80\x80\x80\x80\x02\x1a¤\x01\n\x08MetaData\x12\x15\n\ris_multi_part\x18\x01 \x01(\x08\x12\x14\n\x0ccontent_type\x18\x02 \x01(\t\x12\x0c\n\x04size\x18\x03 \x01(\x04\x12\x0b\n\x03seq\x18\x04 \x01(\x04\x12\x11\n\tfile_name\x18\x05 \x01(\t\x12\x11\n\tfile_type\x18\x06 \x01(\t\x12\x0b\n\x03md5\x18\x07 \x01(\t\x12\x13\n\x0bdescription\x18\x08 
\x01(\t*\x08\x08\t\x10\x80\x80\x80\x80\x02\x1a\x96\x05\n\x06Metric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05alias\x18\x02 \x01(\x04\x12\x11\n\ttimestamp\x18\x03 \x01(\x04\x12\x10\n\x08datatype\x18\x04 \x01(\r\x12\x15\n\ris_historical\x18\x05 \x01(\x08\x12\x14\n\x0cis_transient\x18\x06 \x01(\x08\x12\x0f\n\x07is_null\x18\x07 \x01(\x08\x12#\n\x08metadata\x18\x08 \x01(\x0b2\x11.Payload.MetaData\x12(\n\nproperties\x18\t \x01(\x0b2\x14.Payload.PropertySet\x12\x13\n\tint_value\x18\n \x01(\x05H\x00\x12\x14\n\nlong_value\x18\x0b \x01(\x03H\x00\x12\x15\n\x0bfloat_value\x18\x0c \x01(\x02H\x00\x12\x16\n\x0cdouble_value\x18\r \x01(\x01H\x00\x12\x17\n\rboolean_value\x18\x0e \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x0f \x01(\tH\x00\x12\x15\n\x0bbytes_value\x18\x10 \x01(\x0cH\x00\x12)\n\rdataset_value\x18\x11 \x01(\x0b2\x10.Payload.DataSetH\x00\x12+\n\x0etemplate_value\x18\x12 \x01(\x0b2\x11.Payload.TemplateH\x00\x12\x14\n\nuint_value\x18\x13 \x01(\rH\x00\x12\x15\n\x0bulong_value\x18\x14 \x01(\x04H\x00\x12\x13\n\tbcd_value\x18\x15 \x01(\rH\x00\x12\x15\n\x0bbcd32_value\x18\x16 \x01(\rH\x00\x12?\n\x0fextension_value\x18\x17 \x01(\x0b2$.Payload.Metric.MetricValueExtensionH\x00\x1a \n\x14MetricValueExtension*\x08\x08\x01\x10\x80\x80\x80\x80\x02B\x07\n\x05value*\x08\x08\x07\x10\x80\x80\x80\x80\x02'))) +_PAYLOAD_TEMPLATE_PARAMETER_PARAMETERVALUEEXTENSION = _descriptor.Descriptor(name="ParameterValueExtension", + full_name="Payload.Template.Parameter.ParameterValueExtension", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=True, + syntax="proto2", + extension_ranges=[ + (1, 536870912)], + oneofs=[], + serialized_start=605, + serialized_end=640) +_PAYLOAD_TEMPLATE_PARAMETER = _descriptor.Descriptor(name="Parameter", + full_name="Payload.Template.Parameter", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + 
_descriptor.FieldDescriptor(name="name", + full_name="Payload.Template.Parameter.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=(_b("").decode("utf-8")), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="type", + full_name="Payload.Template.Parameter.type", + index=1, + number=2, + type=13, + cpp_type=3, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="int_value", + full_name="Payload.Template.Parameter.int_value", + index=2, + number=3, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="long_value", + full_name="Payload.Template.Parameter.long_value", + index=3, + number=4, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="float_value", + full_name="Payload.Template.Parameter.float_value", + index=4, + number=5, + type=2, + cpp_type=6, + label=1, + has_default_value=False, + default_value=(float(0)), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="double_value", + full_name="Payload.Template.Parameter.double_value", + index=5, + number=6, + type=1, + cpp_type=5, + label=1, + 
has_default_value=False, + default_value=(float(0)), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="boolean_value", + full_name="Payload.Template.Parameter.boolean_value", + index=6, + number=7, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="string_value", + full_name="Payload.Template.Parameter.string_value", + index=7, + number=8, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=(_b("").decode("utf-8")), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="uint_value", + full_name="Payload.Template.Parameter.uint_value", + index=8, + number=9, + type=13, + cpp_type=3, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="ulong_value", + full_name="Payload.Template.Parameter.ulong_value", + index=9, + number=10, + type=4, + cpp_type=4, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="extension_value", + full_name="Payload.Template.Parameter.extension_value", + index=10, + number=11, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + 
is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR)], + extensions=[], + nested_types=[ + _PAYLOAD_TEMPLATE_PARAMETER_PARAMETERVALUEEXTENSION], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor(name="value", + full_name="Payload.Template.Parameter.value", + index=0, + containing_type=None, + fields=[])], + serialized_start=300, + serialized_end=649) +_PAYLOAD_TEMPLATE = _descriptor.Descriptor(name="Template", + full_name="Payload.Template", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor(name="version", + full_name="Payload.Template.version", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=(_b("").decode("utf-8")), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="metrics", + full_name="Payload.Template.metrics", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="parameters", + full_name="Payload.Template.parameters", + index=2, + number=3, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="template_ref", + full_name="Payload.Template.template_ref", + index=3, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=(_b("").decode("utf-8")), + message_type=None, + enum_type=None, + 
containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="is_definition", + full_name="Payload.Template.is_definition", + index=4, + number=5, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR)], + extensions=[], + nested_types=[ + _PAYLOAD_TEMPLATE_PARAMETER], + enum_types=[], + serialized_options=None, + is_extendable=True, + syntax="proto2", + extension_ranges=[ + (6, 536870912)], + oneofs=[], + serialized_start=142, + serialized_end=659) +_PAYLOAD_DATASET_DATASETVALUE_DATASETVALUEEXTENSION = _descriptor.Descriptor(name="DataSetValueExtension", + full_name="Payload.DataSet.DataSetValue.DataSetValueExtension", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=True, + syntax="proto2", + extension_ranges=[ + (1, 536870912)], + oneofs=[], + serialized_start=1046, + serialized_end=1079) +_PAYLOAD_DATASET_DATASETVALUE = _descriptor.Descriptor(name="DataSetValue", + full_name="Payload.DataSet.DataSetValue", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor(name="int_value", + full_name="Payload.DataSet.DataSetValue.int_value", + index=0, + number=1, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="long_value", + full_name="Payload.DataSet.DataSetValue.long_value", + index=1, + number=2, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + 
containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="float_value", + full_name="Payload.DataSet.DataSetValue.float_value", + index=2, + number=3, + type=2, + cpp_type=6, + label=1, + has_default_value=False, + default_value=(float(0)), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="double_value", + full_name="Payload.DataSet.DataSetValue.double_value", + index=3, + number=4, + type=1, + cpp_type=5, + label=1, + has_default_value=False, + default_value=(float(0)), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="boolean_value", + full_name="Payload.DataSet.DataSetValue.boolean_value", + index=4, + number=5, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="string_value", + full_name="Payload.DataSet.DataSetValue.string_value", + index=5, + number=6, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=(_b("").decode("utf-8")), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="uint_value", + full_name="Payload.DataSet.DataSetValue.uint_value", + index=6, + number=7, + type=13, + cpp_type=3, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + 
_descriptor.FieldDescriptor(name="ulong_value", + full_name="Payload.DataSet.DataSetValue.ulong_value", + index=7, + number=8, + type=4, + cpp_type=4, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="extension_value", + full_name="Payload.DataSet.DataSetValue.extension_value", + index=8, + number=9, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR)], + extensions=[], + nested_types=[ + _PAYLOAD_DATASET_DATASETVALUE_DATASETVALUEEXTENSION], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor(name="value", + full_name="Payload.DataSet.DataSetValue.value", + index=0, + containing_type=None, + fields=[])], + serialized_start=766, + serialized_end=1088) +_PAYLOAD_DATASET_ROW = _descriptor.Descriptor(name="Row", + full_name="Payload.DataSet.Row", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor(name="elements", + full_name="Payload.DataSet.Row.elements", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR)], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=True, + syntax="proto2", + extension_ranges=[ + (2, 536870912)], + oneofs=[], + serialized_start=1090, + serialized_end=1154) +_PAYLOAD_DATASET = _descriptor.Descriptor(name="DataSet", + full_name="Payload.DataSet", + filename=None, + 
file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor(name="num_of_columns", + full_name="Payload.DataSet.num_of_columns", + index=0, + number=1, + type=4, + cpp_type=4, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="columns", + full_name="Payload.DataSet.columns", + index=1, + number=2, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="types", + full_name="Payload.DataSet.types", + index=2, + number=3, + type=13, + cpp_type=3, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="rows", + full_name="Payload.DataSet.rows", + index=3, + number=4, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR)], + extensions=[], + nested_types=[ + _PAYLOAD_DATASET_DATASETVALUE, _PAYLOAD_DATASET_ROW], + enum_types=[], + serialized_options=None, + is_extendable=True, + syntax="proto2", + extension_ranges=[ + (5, 536870912)], + oneofs=[], + serialized_start=662, + serialized_end=1164) +_PAYLOAD_PROPERTYVALUE_PROPERTYVALUEEXTENSION = _descriptor.Descriptor(name="PropertyValueExtension", + full_name="Payload.PropertyValue.PropertyValueExtension", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[], + extensions=[], + nested_types=[], + 
enum_types=[], + serialized_options=None, + is_extendable=True, + syntax="proto2", + extension_ranges=[ + (1, 536870912)], + oneofs=[], + serialized_start=1580, + serialized_end=1614) +_PAYLOAD_PROPERTYVALUE = _descriptor.Descriptor(name="PropertyValue", + full_name="Payload.PropertyValue", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor(name="type", + full_name="Payload.PropertyValue.type", + index=0, + number=1, + type=13, + cpp_type=3, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="is_null", + full_name="Payload.PropertyValue.is_null", + index=1, + number=2, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="int_value", + full_name="Payload.PropertyValue.int_value", + index=2, + number=3, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="long_value", + full_name="Payload.PropertyValue.long_value", + index=3, + number=4, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="float_value", + full_name="Payload.PropertyValue.float_value", + index=4, + number=5, + type=2, + cpp_type=6, + label=1, + has_default_value=False, + default_value=(float(0)), + 
message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="double_value", + full_name="Payload.PropertyValue.double_value", + index=5, + number=6, + type=1, + cpp_type=5, + label=1, + has_default_value=False, + default_value=(float(0)), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="boolean_value", + full_name="Payload.PropertyValue.boolean_value", + index=6, + number=7, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="string_value", + full_name="Payload.PropertyValue.string_value", + index=7, + number=8, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=(_b("").decode("utf-8")), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="propertyset_value", + full_name="Payload.PropertyValue.propertyset_value", + index=8, + number=9, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="propertysets_value", + full_name="Payload.PropertyValue.propertysets_value", + index=9, + number=10, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + 
serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="uint_value", + full_name="Payload.PropertyValue.uint_value", + index=10, + number=11, + type=13, + cpp_type=3, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="ulong_value", + full_name="Payload.PropertyValue.ulong_value", + index=11, + number=12, + type=4, + cpp_type=4, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="extension_value", + full_name="Payload.PropertyValue.extension_value", + index=12, + number=13, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR)], + extensions=[], + nested_types=[ + _PAYLOAD_PROPERTYVALUE_PROPERTYVALUEEXTENSION], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor(name="value", + full_name="Payload.PropertyValue.value", + index=0, + containing_type=None, + fields=[])], + serialized_start=1167, + serialized_end=1623) +_PAYLOAD_PROPERTYSET = _descriptor.Descriptor(name="PropertySet", + full_name="Payload.PropertySet", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor(name="keys", + full_name="Payload.PropertySet.keys", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, 
+ serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="values", + full_name="Payload.PropertySet.values", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR)], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=True, + syntax="proto2", + extension_ranges=[ + (3, 536870912)], + oneofs=[], + serialized_start=1625, + serialized_end=1702) +_PAYLOAD_PROPERTYSETLIST = _descriptor.Descriptor(name="PropertySetList", + full_name="Payload.PropertySetList", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor(name="propertyset", + full_name="Payload.PropertySetList.propertyset", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR)], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=True, + syntax="proto2", + extension_ranges=[ + (2, 536870912)], + oneofs=[], + serialized_start=1704, + serialized_end=1774) +_PAYLOAD_METADATA = _descriptor.Descriptor(name="MetaData", + full_name="Payload.MetaData", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor(name="is_multi_part", + full_name="Payload.MetaData.is_multi_part", + index=0, + number=1, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="content_type", + 
full_name="Payload.MetaData.content_type", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=(_b("").decode("utf-8")), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="size", + full_name="Payload.MetaData.size", + index=2, + number=3, + type=4, + cpp_type=4, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="seq", + full_name="Payload.MetaData.seq", + index=3, + number=4, + type=4, + cpp_type=4, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="file_name", + full_name="Payload.MetaData.file_name", + index=4, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=(_b("").decode("utf-8")), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="file_type", + full_name="Payload.MetaData.file_type", + index=5, + number=6, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=(_b("").decode("utf-8")), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="md5", + full_name="Payload.MetaData.md5", + index=6, + number=7, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=(_b("").decode("utf-8")), + message_type=None, + enum_type=None, + 
containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="description", + full_name="Payload.MetaData.description", + index=7, + number=8, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=(_b("").decode("utf-8")), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR)], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=True, + syntax="proto2", + extension_ranges=[ + (9, 536870912)], + oneofs=[], + serialized_start=1777, + serialized_end=1941) +_PAYLOAD_METRIC_METRICVALUEEXTENSION = _descriptor.Descriptor(name="MetricValueExtension", + full_name="Payload.Metric.MetricValueExtension", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=True, + syntax="proto2", + extension_ranges=[ + (1, 536870912)], + oneofs=[], + serialized_start=2565, + serialized_end=2597) +_PAYLOAD_METRIC = _descriptor.Descriptor(name="Metric", + full_name="Payload.Metric", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor(name="name", + full_name="Payload.Metric.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=(_b("").decode("utf-8")), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="alias", + full_name="Payload.Metric.alias", + index=1, + number=2, + type=4, + cpp_type=4, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + 
file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="timestamp", + full_name="Payload.Metric.timestamp", + index=2, + number=3, + type=4, + cpp_type=4, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="datatype", + full_name="Payload.Metric.datatype", + index=3, + number=4, + type=13, + cpp_type=3, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="is_historical", + full_name="Payload.Metric.is_historical", + index=4, + number=5, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="is_transient", + full_name="Payload.Metric.is_transient", + index=5, + number=6, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="is_null", + full_name="Payload.Metric.is_null", + index=6, + number=7, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="metadata", + full_name="Payload.Metric.metadata", + index=7, + number=8, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="properties", + full_name="Payload.Metric.properties", + index=8, + number=9, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="int_value", + full_name="Payload.Metric.int_value", + index=9, + number=10, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="long_value", + full_name="Payload.Metric.long_value", + index=10, + number=11, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="float_value", + full_name="Payload.Metric.float_value", + index=11, + number=12, + type=2, + cpp_type=6, + label=1, + has_default_value=False, + default_value=(float(0)), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="double_value", + full_name="Payload.Metric.double_value", + index=12, + number=13, + type=1, + cpp_type=5, + label=1, + has_default_value=False, + default_value=(float(0)), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="boolean_value", + 
full_name="Payload.Metric.boolean_value", + index=13, + number=14, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="string_value", + full_name="Payload.Metric.string_value", + index=14, + number=15, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=(_b("").decode("utf-8")), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="bytes_value", + full_name="Payload.Metric.bytes_value", + index=15, + number=16, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=(_b("")), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="dataset_value", + full_name="Payload.Metric.dataset_value", + index=16, + number=17, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="template_value", + full_name="Payload.Metric.template_value", + index=17, + number=18, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="uint_value", + full_name="Payload.Metric.uint_value", + index=18, + number=19, + type=13, + cpp_type=3, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="ulong_value", + full_name="Payload.Metric.ulong_value", + index=19, + number=20, + type=4, + cpp_type=4, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="bcd_value", + full_name="Payload.Metric.bcd_value", + index=20, + number=21, + type=13, + cpp_type=3, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="bcd32_value", + full_name="Payload.Metric.bcd32_value", + index=21, + number=22, + type=13, + cpp_type=3, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="extension_value", + full_name="Payload.Metric.extension_value", + index=22, + number=23, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR)], + extensions=[], + nested_types=[ + _PAYLOAD_METRIC_METRICVALUEEXTENSION], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor(name="value", + full_name="Payload.Metric.value", + index=0, + containing_type=None, + fields=[])], + serialized_start=1944, + serialized_end=2606) +_PAYLOAD = _descriptor.Descriptor(name="Payload", + 
full_name="Payload", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor(name="timestamp", + full_name="Payload.timestamp", + index=0, + number=1, + type=4, + cpp_type=4, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="metrics", + full_name="Payload.metrics", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="seq", + full_name="Payload.seq", + index=2, + number=3, + type=4, + cpp_type=4, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="bdseq", + full_name="Payload.bdseq", + index=3, + number=4, + type=4, + cpp_type=4, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="uuid", + full_name="Payload.uuid", + index=4, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=(_b("").decode("utf-8")), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR), + _descriptor.FieldDescriptor(name="body", + full_name="Payload.body", + index=5, + number=6, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=(_b("")), + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR)], + extensions=[], + nested_types=[ + _PAYLOAD_TEMPLATE, _PAYLOAD_DATASET, _PAYLOAD_PROPERTYVALUE, _PAYLOAD_PROPERTYSET, + _PAYLOAD_PROPERTYSETLIST, _PAYLOAD_METADATA, _PAYLOAD_METRIC], + enum_types=[], + serialized_options=None, + is_extendable=True, + syntax="proto2", + extension_ranges=[ + (7, 536870912)], + oneofs=[], + serialized_start=21, + serialized_end=2616) +_PAYLOAD_TEMPLATE_PARAMETER_PARAMETERVALUEEXTENSION.containing_type = _PAYLOAD_TEMPLATE_PARAMETER +_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name["extension_value"].message_type = _PAYLOAD_TEMPLATE_PARAMETER_PARAMETERVALUEEXTENSION +_PAYLOAD_TEMPLATE_PARAMETER.containing_type = _PAYLOAD_TEMPLATE +_PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name["value"].fields.append(_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name["int_value"]) +_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name["int_value"].containing_oneof = _PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name["value"] +_PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name["value"].fields.append(_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name["long_value"]) +_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name["long_value"].containing_oneof = _PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name["value"] +_PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name["value"].fields.append(_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name["float_value"]) +_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name["float_value"].containing_oneof = _PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name["value"] +_PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name["value"].fields.append(_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name["double_value"]) +_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name["double_value"].containing_oneof = _PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name["value"] +_PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name["value"].fields.append(_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name["boolean_value"]) 
+_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name["boolean_value"].containing_oneof = _PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name["value"] +_PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name["value"].fields.append(_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name["string_value"]) +_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name["string_value"].containing_oneof = _PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name["value"] +_PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name["value"].fields.append(_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name["uint_value"]) +_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name["uint_value"].containing_oneof = _PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name["value"] +_PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name["value"].fields.append(_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name["ulong_value"]) +_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name["ulong_value"].containing_oneof = _PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name["value"] +_PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name["value"].fields.append(_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name["extension_value"]) +_PAYLOAD_TEMPLATE_PARAMETER.fields_by_name["extension_value"].containing_oneof = _PAYLOAD_TEMPLATE_PARAMETER.oneofs_by_name["value"] +_PAYLOAD_TEMPLATE.fields_by_name["metrics"].message_type = _PAYLOAD_METRIC +_PAYLOAD_TEMPLATE.fields_by_name["parameters"].message_type = _PAYLOAD_TEMPLATE_PARAMETER +_PAYLOAD_TEMPLATE.containing_type = _PAYLOAD +_PAYLOAD_DATASET_DATASETVALUE_DATASETVALUEEXTENSION.containing_type = _PAYLOAD_DATASET_DATASETVALUE +_PAYLOAD_DATASET_DATASETVALUE.fields_by_name["extension_value"].message_type = _PAYLOAD_DATASET_DATASETVALUE_DATASETVALUEEXTENSION +_PAYLOAD_DATASET_DATASETVALUE.containing_type = _PAYLOAD_DATASET +_PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name["value"].fields.append(_PAYLOAD_DATASET_DATASETVALUE.fields_by_name["int_value"]) +_PAYLOAD_DATASET_DATASETVALUE.fields_by_name["int_value"].containing_oneof = _PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name["value"] 
+_PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name["value"].fields.append(_PAYLOAD_DATASET_DATASETVALUE.fields_by_name["long_value"]) +_PAYLOAD_DATASET_DATASETVALUE.fields_by_name["long_value"].containing_oneof = _PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name["value"] +_PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name["value"].fields.append(_PAYLOAD_DATASET_DATASETVALUE.fields_by_name["float_value"]) +_PAYLOAD_DATASET_DATASETVALUE.fields_by_name["float_value"].containing_oneof = _PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name["value"] +_PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name["value"].fields.append(_PAYLOAD_DATASET_DATASETVALUE.fields_by_name["double_value"]) +_PAYLOAD_DATASET_DATASETVALUE.fields_by_name["double_value"].containing_oneof = _PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name["value"] +_PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name["value"].fields.append(_PAYLOAD_DATASET_DATASETVALUE.fields_by_name["boolean_value"]) +_PAYLOAD_DATASET_DATASETVALUE.fields_by_name["boolean_value"].containing_oneof = _PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name["value"] +_PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name["value"].fields.append(_PAYLOAD_DATASET_DATASETVALUE.fields_by_name["string_value"]) +_PAYLOAD_DATASET_DATASETVALUE.fields_by_name["string_value"].containing_oneof = _PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name["value"] +_PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name["value"].fields.append(_PAYLOAD_DATASET_DATASETVALUE.fields_by_name["uint_value"]) +_PAYLOAD_DATASET_DATASETVALUE.fields_by_name["uint_value"].containing_oneof = _PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name["value"] +_PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name["value"].fields.append(_PAYLOAD_DATASET_DATASETVALUE.fields_by_name["ulong_value"]) +_PAYLOAD_DATASET_DATASETVALUE.fields_by_name["ulong_value"].containing_oneof = _PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name["value"] +_PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name["value"].fields.append(_PAYLOAD_DATASET_DATASETVALUE.fields_by_name["extension_value"]) 
+_PAYLOAD_DATASET_DATASETVALUE.fields_by_name["extension_value"].containing_oneof = _PAYLOAD_DATASET_DATASETVALUE.oneofs_by_name["value"] +_PAYLOAD_DATASET_ROW.fields_by_name["elements"].message_type = _PAYLOAD_DATASET_DATASETVALUE +_PAYLOAD_DATASET_ROW.containing_type = _PAYLOAD_DATASET +_PAYLOAD_DATASET.fields_by_name["rows"].message_type = _PAYLOAD_DATASET_ROW +_PAYLOAD_DATASET.containing_type = _PAYLOAD +_PAYLOAD_PROPERTYVALUE_PROPERTYVALUEEXTENSION.containing_type = _PAYLOAD_PROPERTYVALUE +_PAYLOAD_PROPERTYVALUE.fields_by_name["propertyset_value"].message_type = _PAYLOAD_PROPERTYSET +_PAYLOAD_PROPERTYVALUE.fields_by_name["propertysets_value"].message_type = _PAYLOAD_PROPERTYSETLIST +_PAYLOAD_PROPERTYVALUE.fields_by_name["extension_value"].message_type = _PAYLOAD_PROPERTYVALUE_PROPERTYVALUEEXTENSION +_PAYLOAD_PROPERTYVALUE.containing_type = _PAYLOAD +_PAYLOAD_PROPERTYVALUE.oneofs_by_name["value"].fields.append(_PAYLOAD_PROPERTYVALUE.fields_by_name["int_value"]) +_PAYLOAD_PROPERTYVALUE.fields_by_name["int_value"].containing_oneof = _PAYLOAD_PROPERTYVALUE.oneofs_by_name["value"] +_PAYLOAD_PROPERTYVALUE.oneofs_by_name["value"].fields.append(_PAYLOAD_PROPERTYVALUE.fields_by_name["long_value"]) +_PAYLOAD_PROPERTYVALUE.fields_by_name["long_value"].containing_oneof = _PAYLOAD_PROPERTYVALUE.oneofs_by_name["value"] +_PAYLOAD_PROPERTYVALUE.oneofs_by_name["value"].fields.append(_PAYLOAD_PROPERTYVALUE.fields_by_name["float_value"]) +_PAYLOAD_PROPERTYVALUE.fields_by_name["float_value"].containing_oneof = _PAYLOAD_PROPERTYVALUE.oneofs_by_name["value"] +_PAYLOAD_PROPERTYVALUE.oneofs_by_name["value"].fields.append(_PAYLOAD_PROPERTYVALUE.fields_by_name["double_value"]) +_PAYLOAD_PROPERTYVALUE.fields_by_name["double_value"].containing_oneof = _PAYLOAD_PROPERTYVALUE.oneofs_by_name["value"] +_PAYLOAD_PROPERTYVALUE.oneofs_by_name["value"].fields.append(_PAYLOAD_PROPERTYVALUE.fields_by_name["boolean_value"]) +_PAYLOAD_PROPERTYVALUE.fields_by_name["boolean_value"].containing_oneof = 
_PAYLOAD_PROPERTYVALUE.oneofs_by_name["value"] +_PAYLOAD_PROPERTYVALUE.oneofs_by_name["value"].fields.append(_PAYLOAD_PROPERTYVALUE.fields_by_name["string_value"]) +_PAYLOAD_PROPERTYVALUE.fields_by_name["string_value"].containing_oneof = _PAYLOAD_PROPERTYVALUE.oneofs_by_name["value"] +_PAYLOAD_PROPERTYVALUE.oneofs_by_name["value"].fields.append(_PAYLOAD_PROPERTYVALUE.fields_by_name["propertyset_value"]) +_PAYLOAD_PROPERTYVALUE.fields_by_name["propertyset_value"].containing_oneof = _PAYLOAD_PROPERTYVALUE.oneofs_by_name["value"] +_PAYLOAD_PROPERTYVALUE.oneofs_by_name["value"].fields.append(_PAYLOAD_PROPERTYVALUE.fields_by_name["propertysets_value"]) +_PAYLOAD_PROPERTYVALUE.fields_by_name["propertysets_value"].containing_oneof = _PAYLOAD_PROPERTYVALUE.oneofs_by_name["value"] +_PAYLOAD_PROPERTYVALUE.oneofs_by_name["value"].fields.append(_PAYLOAD_PROPERTYVALUE.fields_by_name["uint_value"]) +_PAYLOAD_PROPERTYVALUE.fields_by_name["uint_value"].containing_oneof = _PAYLOAD_PROPERTYVALUE.oneofs_by_name["value"] +_PAYLOAD_PROPERTYVALUE.oneofs_by_name["value"].fields.append(_PAYLOAD_PROPERTYVALUE.fields_by_name["ulong_value"]) +_PAYLOAD_PROPERTYVALUE.fields_by_name["ulong_value"].containing_oneof = _PAYLOAD_PROPERTYVALUE.oneofs_by_name["value"] +_PAYLOAD_PROPERTYVALUE.oneofs_by_name["value"].fields.append(_PAYLOAD_PROPERTYVALUE.fields_by_name["extension_value"]) +_PAYLOAD_PROPERTYVALUE.fields_by_name["extension_value"].containing_oneof = _PAYLOAD_PROPERTYVALUE.oneofs_by_name["value"] +_PAYLOAD_PROPERTYSET.fields_by_name["values"].message_type = _PAYLOAD_PROPERTYVALUE +_PAYLOAD_PROPERTYSET.containing_type = _PAYLOAD +_PAYLOAD_PROPERTYSETLIST.fields_by_name["propertyset"].message_type = _PAYLOAD_PROPERTYSET +_PAYLOAD_PROPERTYSETLIST.containing_type = _PAYLOAD +_PAYLOAD_METADATA.containing_type = _PAYLOAD +_PAYLOAD_METRIC_METRICVALUEEXTENSION.containing_type = _PAYLOAD_METRIC +_PAYLOAD_METRIC.fields_by_name["metadata"].message_type = _PAYLOAD_METADATA 
+_PAYLOAD_METRIC.fields_by_name["properties"].message_type = _PAYLOAD_PROPERTYSET +_PAYLOAD_METRIC.fields_by_name["dataset_value"].message_type = _PAYLOAD_DATASET +_PAYLOAD_METRIC.fields_by_name["template_value"].message_type = _PAYLOAD_TEMPLATE +_PAYLOAD_METRIC.fields_by_name["extension_value"].message_type = _PAYLOAD_METRIC_METRICVALUEEXTENSION +_PAYLOAD_METRIC.containing_type = _PAYLOAD +_PAYLOAD_METRIC.oneofs_by_name["value"].fields.append(_PAYLOAD_METRIC.fields_by_name["int_value"]) +_PAYLOAD_METRIC.fields_by_name["int_value"].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name["value"] +_PAYLOAD_METRIC.oneofs_by_name["value"].fields.append(_PAYLOAD_METRIC.fields_by_name["long_value"]) +_PAYLOAD_METRIC.fields_by_name["long_value"].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name["value"] +_PAYLOAD_METRIC.oneofs_by_name["value"].fields.append(_PAYLOAD_METRIC.fields_by_name["float_value"]) +_PAYLOAD_METRIC.fields_by_name["float_value"].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name["value"] +_PAYLOAD_METRIC.oneofs_by_name["value"].fields.append(_PAYLOAD_METRIC.fields_by_name["double_value"]) +_PAYLOAD_METRIC.fields_by_name["double_value"].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name["value"] +_PAYLOAD_METRIC.oneofs_by_name["value"].fields.append(_PAYLOAD_METRIC.fields_by_name["boolean_value"]) +_PAYLOAD_METRIC.fields_by_name["boolean_value"].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name["value"] +_PAYLOAD_METRIC.oneofs_by_name["value"].fields.append(_PAYLOAD_METRIC.fields_by_name["string_value"]) +_PAYLOAD_METRIC.fields_by_name["string_value"].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name["value"] +_PAYLOAD_METRIC.oneofs_by_name["value"].fields.append(_PAYLOAD_METRIC.fields_by_name["bytes_value"]) +_PAYLOAD_METRIC.fields_by_name["bytes_value"].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name["value"] +_PAYLOAD_METRIC.oneofs_by_name["value"].fields.append(_PAYLOAD_METRIC.fields_by_name["dataset_value"]) 
+_PAYLOAD_METRIC.fields_by_name["dataset_value"].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name["value"] +_PAYLOAD_METRIC.oneofs_by_name["value"].fields.append(_PAYLOAD_METRIC.fields_by_name["template_value"]) +_PAYLOAD_METRIC.fields_by_name["template_value"].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name["value"] +_PAYLOAD_METRIC.oneofs_by_name["value"].fields.append(_PAYLOAD_METRIC.fields_by_name["uint_value"]) +_PAYLOAD_METRIC.fields_by_name["uint_value"].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name["value"] +_PAYLOAD_METRIC.oneofs_by_name["value"].fields.append(_PAYLOAD_METRIC.fields_by_name["ulong_value"]) +_PAYLOAD_METRIC.fields_by_name["ulong_value"].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name["value"] +_PAYLOAD_METRIC.oneofs_by_name["value"].fields.append(_PAYLOAD_METRIC.fields_by_name["bcd_value"]) +_PAYLOAD_METRIC.fields_by_name["bcd_value"].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name["value"] +_PAYLOAD_METRIC.oneofs_by_name["value"].fields.append(_PAYLOAD_METRIC.fields_by_name["bcd32_value"]) +_PAYLOAD_METRIC.fields_by_name["bcd32_value"].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name["value"] +_PAYLOAD_METRIC.oneofs_by_name["value"].fields.append(_PAYLOAD_METRIC.fields_by_name["extension_value"]) +_PAYLOAD_METRIC.fields_by_name["extension_value"].containing_oneof = _PAYLOAD_METRIC.oneofs_by_name["value"] +_PAYLOAD.fields_by_name["metrics"].message_type = _PAYLOAD_METRIC +DESCRIPTOR.message_types_by_name["Payload"] = _PAYLOAD +_sym_db.RegisterFileDescriptor(DESCRIPTOR) +Payload = _reflection.GeneratedProtocolMessageType("Payload", (_message.Message,), dict(Template=(_reflection.GeneratedProtocolMessageType("Template", (_message.Message,), dict(Parameter=(_reflection.GeneratedProtocolMessageType("Parameter", (_message.Message,), dict(ParameterValueExtension=(_reflection.GeneratedProtocolMessageType("ParameterValueExtension", (_message.Message,), dict(DESCRIPTOR=_PAYLOAD_TEMPLATE_PARAMETER_PARAMETERVALUEEXTENSION, + 
__module__="SparkPlugB_pb2"))), + DESCRIPTOR=_PAYLOAD_TEMPLATE_PARAMETER, + __module__="SparkPlugB_pb2"))), + DESCRIPTOR=_PAYLOAD_TEMPLATE, + __module__="SparkPlugB_pb2"))), + DataSet=(_reflection.GeneratedProtocolMessageType("DataSet", (_message.Message,), dict(DataSetValue=(_reflection.GeneratedProtocolMessageType("DataSetValue", (_message.Message,), dict(DataSetValueExtension=(_reflection.GeneratedProtocolMessageType("DataSetValueExtension", (_message.Message,), dict(DESCRIPTOR=_PAYLOAD_DATASET_DATASETVALUE_DATASETVALUEEXTENSION, + __module__="SparkPlugB_pb2"))), + DESCRIPTOR=_PAYLOAD_DATASET_DATASETVALUE, + __module__="SparkPlugB_pb2"))), + Row=(_reflection.GeneratedProtocolMessageType("Row", (_message.Message,), dict(DESCRIPTOR=_PAYLOAD_DATASET_ROW, + __module__="SparkPlugB_pb2"))), + DESCRIPTOR=_PAYLOAD_DATASET, + __module__="SparkPlugB_pb2"))), + PropertyValue=(_reflection.GeneratedProtocolMessageType("PropertyValue", (_message.Message,), dict(PropertyValueExtension=(_reflection.GeneratedProtocolMessageType("PropertyValueExtension", (_message.Message,), dict(DESCRIPTOR=_PAYLOAD_PROPERTYVALUE_PROPERTYVALUEEXTENSION, + __module__="SparkPlugB_pb2"))), + DESCRIPTOR=_PAYLOAD_PROPERTYVALUE, + __module__="SparkPlugB_pb2"))), + PropertySet=(_reflection.GeneratedProtocolMessageType("PropertySet", (_message.Message,), dict(DESCRIPTOR=_PAYLOAD_PROPERTYSET, + __module__="SparkPlugB_pb2"))), + PropertySetList=(_reflection.GeneratedProtocolMessageType("PropertySetList", (_message.Message,), dict(DESCRIPTOR=_PAYLOAD_PROPERTYSETLIST, + __module__="SparkPlugB_pb2"))), + MetaData=(_reflection.GeneratedProtocolMessageType("MetaData", (_message.Message,), dict(DESCRIPTOR=_PAYLOAD_METADATA, + __module__="SparkPlugB_pb2"))), + Metric=(_reflection.GeneratedProtocolMessageType("Metric", (_message.Message,), dict(MetricValueExtension=(_reflection.GeneratedProtocolMessageType("MetricValueExtension", (_message.Message,), dict(DESCRIPTOR=_PAYLOAD_METRIC_METRICVALUEEXTENSION, + 
__module__="SparkPlugB_pb2"))), + DESCRIPTOR=_PAYLOAD_METRIC, + __module__="SparkPlugB_pb2"))), + DESCRIPTOR=_PAYLOAD, + __module__="SparkPlugB_pb2")) +_sym_db.RegisterMessage(Payload) +_sym_db.RegisterMessage(Payload.Template) +_sym_db.RegisterMessage(Payload.Template.Parameter) +_sym_db.RegisterMessage(Payload.Template.Parameter.ParameterValueExtension) +_sym_db.RegisterMessage(Payload.DataSet) +_sym_db.RegisterMessage(Payload.DataSet.DataSetValue) +_sym_db.RegisterMessage(Payload.DataSet.DataSetValue.DataSetValueExtension) +_sym_db.RegisterMessage(Payload.DataSet.Row) +_sym_db.RegisterMessage(Payload.PropertyValue) +_sym_db.RegisterMessage(Payload.PropertyValue.PropertyValueExtension) +_sym_db.RegisterMessage(Payload.PropertySet) +_sym_db.RegisterMessage(Payload.PropertySetList) +_sym_db.RegisterMessage(Payload.MetaData) +_sym_db.RegisterMessage(Payload.Metric) +_sym_db.RegisterMessage(Payload.Metric.MetricValueExtension) diff --git a/APPS_UNCOMPILED/lib/timeseries/__init__.py b/APPS_UNCOMPILED/lib/timeseries/__init__.py new file mode 100644 index 0000000..f7159bf --- /dev/null +++ b/APPS_UNCOMPILED/lib/timeseries/__init__.py @@ -0,0 +1,23 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/timeseries/__init__.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 896 bytes +""" + IoT Time Series API + + Store and query time series data with a precision of 1 millisecond. 
# noqa: E501 +""" +from __future__ import absolute_import +from timeseries.clients.time_series_client import TimeSeriesClient +from timeseries.models.badrequest import Badrequest +from timeseries.models.delete_timeseries_request import DeleteTimeseriesRequest +from timeseries.models.error import Error +from timeseries.models.get_timeseries_request import GetTimeseriesRequest +from timeseries.models.notfound import Notfound +from timeseries.models.put_timeseries_request import PutTimeseriesRequest +from timeseries.models.timeseries import Timeseries +from timeseries.models.toomanyrequests import Toomanyrequests +from timeseries.models.unauthorized import Unauthorized diff --git a/APPS_UNCOMPILED/lib/timeseries/clients/__init__.py b/APPS_UNCOMPILED/lib/timeseries/clients/__init__.py new file mode 100644 index 0000000..586c10b --- /dev/null +++ b/APPS_UNCOMPILED/lib/timeseries/clients/__init__.py @@ -0,0 +1,9 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/timeseries/clients/__init__.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 160 bytes +from __future__ import absolute_import +from timeseries.clients.time_series_client import TimeSeriesClient diff --git a/APPS_UNCOMPILED/lib/timeseries/clients/time_series_client.py b/APPS_UNCOMPILED/lib/timeseries/clients/time_series_client.py new file mode 100644 index 0000000..709fa29 --- /dev/null +++ b/APPS_UNCOMPILED/lib/timeseries/clients/time_series_client.py @@ -0,0 +1,108 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/timeseries/clients/time_series_client.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 
2**32: 8077 bytes
"""
    IoT Time Series API

    Store and query time series data with a precision of 1 millisecond.  # noqa: E501
"""
from __future__ import absolute_import
from mindsphere_core.mindsphere_core import logger
from mindsphere_core import mindsphere_core, exceptions, token_service
from mindsphere_core.token_service import init_credentials

class TimeSeriesClient:
    # Decompiled MindSphere SDK client for the IoT Time Series service.
    # Every request is validated locally, then dispatched through
    # mindsphere_core.invoke_service() with a freshly fetched bearer token.

    # Common URL prefix shared by every endpoint of this service.
    __base_path__ = "/api/iottimeseries/v3"
    # Root package name (e.g. "timeseries"); passed to invoke_service() —
    # presumably used there to resolve response model classes (confirm in mindsphere_core).
    __model_package__ = __name__.split(".")[0]

    def __init__(self, rest_client_config=None, mindsphere_credentials=None):
        """Create a client.

        :param rest_client_config: connection settings; forwarded unchanged to
            token_service.fetch_token() and mindsphere_core.build_url().
        :param mindsphere_credentials: raw credentials; normalized once here
            via init_credentials().
        """
        self.rest_client_config = rest_client_config
        self.mindsphere_credentials = init_credentials(mindsphere_credentials)

    def delete_timeseries(self, request_object):
        """delete time series

        Delete time series data for a single entity and propertyset within a given time range. Data for all properties within a propertyset is deleted.

        :param DeleteTimeseriesRequest request_object: It contains the below parameters --> |br| ( entity* - unique identifier of the entity ), |br| ( propertysetname* - name of the propertyset ), |br| ( from* - beginning of the timerange to delete (exclusive) ), |br| ( to* - end of the timerange to delete (inclusive) )

        :return: None
        """
        logger.info("TimeSeriesClient.delete_timeseries() invoked.")
        # Fail fast on missing required fields before doing any network work
        # (token fetch happens only after validation succeeds).
        if request_object is None:
            raise exceptions.MindsphereClientError("`request_object` is not passed when calling `delete_timeseries`")
        if request_object.entity is None:
            raise exceptions.MindsphereClientError("The required parameter `entity` is missing from `request_object`, when calling `delete_timeseries`")
        if request_object.propertysetname is None:
            raise exceptions.MindsphereClientError("The required parameter `propertysetname` is missing from `request_object`, when calling `delete_timeseries`")
        # `from` is a Python keyword, so the request model exposes it as
        # `_from`; the wire-level query parameter below is still named "from".
        if request_object._from is None:
            raise exceptions.MindsphereClientError("The required parameter `from` is missing from `request_object`, when calling `delete_timeseries`")
        if request_object.to is None:
            raise exceptions.MindsphereClientError("The required parameter `to` is missing from `request_object`, when calling `delete_timeseries`")
        # Substitute path parameters into the endpoint template.
        end_point_url = "/timeseries/{entity}/{propertysetname}"
        end_point_url = end_point_url.format(entity=(request_object.entity), propertysetname=(request_object.propertysetname))
        # A fresh token is fetched per call; no caching at this layer.
        token = token_service.fetch_token(self.rest_client_config, self.mindsphere_credentials)
        api_url = mindsphere_core.build_url(self.__base_path__, end_point_url, self.rest_client_config)
        headers = {'Accept':"application/json", 'Content-Type':"application/json", 'Authorization':"Bearer " + (str(token))}
        query_params = {'from':request_object._from, 'to':request_object.to}
        # DELETE carries no form fields, uploads, or body.
        form_params, local_var_files, body_params = {}, {}, None
        logger.info("TimeSeriesClient.delete_timeseries() --> Proceeding for API Invoker.")
        return mindsphere_core.invoke_service(self.rest_client_config, api_url, headers, "DELETE", query_params, form_params, body_params, local_var_files, None, self.__model_package__)

    def get_timeseries(self, request_object):
        """read time series

        Read time series data for a single entity and propertyset. Returns data for a specified time range. Returns the latest value if no range is provided.
+ + :param GetTimeseriesRequest request_object: It contains the below parameters --> |br| ( entity* - unique identifier of the entity ), |br| ( propertysetname* - name of the propertyset ), |br| ( from - beginning of the time range to read (exclusive) ), |br| ( to - end of the time range to read (inclusive) ), |br| ( limit - maximum number of entries to read ), |br| ( select - select fields to return ), |br| ( sort - sort order by time, permissible values are asc and desc ) + + :return: list[Timeseries] + """ + logger.info("TimeSeriesClient.get_timeseries() invoked.") + if request_object is None: + raise exceptions.MindsphereClientError("`request_object` is not passed when calling `get_timeseries`") + if request_object.entity is None: + raise exceptions.MindsphereClientError("The required parameter `entity` is missing from `request_object`, when calling `get_timeseries`") + if request_object.propertysetname is None: + raise exceptions.MindsphereClientError("The required parameter `propertysetname` is missing from `request_object`, when calling `get_timeseries`") + end_point_url = "/timeseries/{entity}/{propertysetname}" + end_point_url = end_point_url.format(entity=(request_object.entity), propertysetname=(request_object.propertysetname)) + token = token_service.fetch_token(self.rest_client_config, self.mindsphere_credentials) + api_url = mindsphere_core.build_url(self.__base_path__, end_point_url, self.rest_client_config) + headers = {'Accept':"application/json", 'Content-Type':"application/json", 'Authorization':"Bearer " + (str(token))} + query_params = {'from':request_object._from, 'to':request_object.to, 'limit':request_object.limit, 'select':request_object.select, 'sort':request_object.sort} + form_params, local_var_files, body_params = {}, {}, None + logger.info("TimeSeriesClient.get_timeseries() --> Proceeding for API Invoker.") + return mindsphere_core.invoke_service(self.rest_client_config, api_url, headers, "GET", query_params, form_params, body_params, 
local_var_files, "list[Timeseries]", self.__model_package__) + + def put_timeseries(self, request_object): + """write or update time series + + Write or update time series data for a single entity and propertyset. Existing time series data is overwritten. Data for all properties within a propertyset needs to be provided together. + + :param PutTimeseriesRequest request_object: It contains the below parameters --> |br| ( entity* - unique identifier of the entity ), |br| ( propertysetname* - name of the propertyset ), |br| ( timeseries* - time series data array ) + + :return: None + """ + logger.info("TimeSeriesClient.put_timeseries() invoked.") + if request_object is None: + raise exceptions.MindsphereClientError("`request_object` is not passed when calling `put_timeseries`") + if request_object.entity is None: + raise exceptions.MindsphereClientError("The required parameter `entity` is missing from `request_object`, when calling `put_timeseries`") + if request_object.propertysetname is None: + raise exceptions.MindsphereClientError("The required parameter `propertysetname` is missing from `request_object`, when calling `put_timeseries`") + if request_object.timeseries is None: + raise exceptions.MindsphereClientError("The required parameter `timeseries` is missing from `request_object`, when calling `put_timeseries`") + end_point_url = "/timeseries/{entity}/{propertysetname}" + end_point_url = end_point_url.format(entity=(request_object.entity), propertysetname=(request_object.propertysetname)) + token = token_service.fetch_token(self.rest_client_config, self.mindsphere_credentials) + api_url = mindsphere_core.build_url(self.__base_path__, end_point_url, self.rest_client_config) + headers = {'Accept':"application/json", 'Content-Type':"application/json", 'Authorization':"Bearer " + (str(token))} + query_params = {} + form_params, local_var_files, body_params = {}, {}, request_object.timeseries + logger.info("TimeSeriesClient.put_timeseries() --> Proceeding for API 
Invoker.") + return mindsphere_core.invoke_service(self.rest_client_config, api_url, headers, "PUT", query_params, form_params, body_params, local_var_files, None, self.__model_package__) diff --git a/APPS_UNCOMPILED/lib/timeseries/models/__init__.py b/APPS_UNCOMPILED/lib/timeseries/models/__init__.py new file mode 100644 index 0000000..711ccaa --- /dev/null +++ b/APPS_UNCOMPILED/lib/timeseries/models/__init__.py @@ -0,0 +1,22 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/timeseries/models/__init__.py +# Compiled at: 2024-04-18 03:12:58 +# Size of source mod 2**32: 790 bytes +""" + IoT Time Series API + + Store and query time series data with a precision of 1 millisecond. # noqa: E501 +""" +from __future__ import absolute_import +from timeseries.models.badrequest import Badrequest +from timeseries.models.delete_timeseries_request import DeleteTimeseriesRequest +from timeseries.models.error import Error +from timeseries.models.get_timeseries_request import GetTimeseriesRequest +from timeseries.models.notfound import Notfound +from timeseries.models.put_timeseries_request import PutTimeseriesRequest +from timeseries.models.timeseries import Timeseries +from timeseries.models.toomanyrequests import Toomanyrequests +from timeseries.models.unauthorized import Unauthorized diff --git a/APPS_UNCOMPILED/lib/timeseries/models/badrequest.py b/APPS_UNCOMPILED/lib/timeseries/models/badrequest.py new file mode 100644 index 0000000..f207e1e --- /dev/null +++ b/APPS_UNCOMPILED/lib/timeseries/models/badrequest.py @@ -0,0 +1,102 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: 
class Badrequest(object):
    """Error model for a 400 Bad Request response from the Time Series API.

    Attributes:
        attribute_types (dict): attribute name -> attribute type.
        attribute_map (dict): attribute name -> json key in the definition.
    """

    attribute_types = {"id": "str", "message": "str"}
    attribute_map = {"id": "id", "message": "message"}

    def __init__(self, id=None, message=None):
        self._id = id
        self._message = message
        self.discriminator = None

    @property
    def id(self):
        """str: error identifier of this Badrequest."""
        return self._id

    @id.setter
    def id(self, id):
        self._id = id

    @property
    def message(self):
        """str: human-readable error message of this Badrequest."""
        return self._message

    @message.setter
    def message(self, message):
        self._message = message

    def to_dict(self):
        """Return the model properties as a dict.

        Nested models (anything exposing ``to_dict``) are converted
        recursively, inside both lists and dicts.
        """
        # The decompiled source contained invalid lambda residue here
        # ("lambda x: if hasattr(...)"); rewritten as conditional
        # expressions. Plain dict iteration replaces six.iteritems
        # (this code targets Python 3 only).
        result = {}
        for attr in self.attribute_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    k: v.to_dict() if hasattr(v, "to_dict") else v
                    for k, v in value.items()
                }
            else:
                result[attr] = value
        if issubclass(Badrequest, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Return True if both objects are equal."""
        if not isinstance(other, Badrequest):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Return True if both objects are not equal."""
        return not self == other
class DeleteTimeseriesRequest(object):
    """Request parameters for ``TimeSeriesClient.delete_timeseries``.

    Attributes:
        attribute_types (dict): attribute name -> attribute type.
        attribute_map (dict): attribute name -> json key in the definition.
    """

    # NOTE(review): the decompiled source carried doubly quoted values
    # ('"str"', '"from"'); normalized to plain strings to match the sibling
    # models (Badrequest, Timeseries, ...).
    attribute_types = {
        "_from": "str",
        "to": "str",
        "entity": "str",
        "propertysetname": "str",
    }
    attribute_map = {
        "_from": "from",
        "to": "to",
        "entity": "entity",
        "propertysetname": "propertysetname",
    }

    def __init__(self, _from=None, to=None, entity=None, propertysetname=None):
        # `from` is a Python keyword, hence the `_from` attribute name; the
        # double-underscore storage keeps the original name-mangled slot
        # (_DeleteTimeseriesRequest__from).
        self.__from = _from
        self._to = to
        self._entity = entity
        self._propertysetname = propertysetname
        self.discriminator = None

    @property
    def _from(self):
        """str: beginning of the time range to delete (exclusive)."""
        return self.__from

    @_from.setter
    def _from(self, _from):
        self.__from = _from

    @property
    def to(self):
        """str: end of the time range to delete (inclusive)."""
        return self._to

    @to.setter
    def to(self, to):
        self._to = to

    @property
    def entity(self):
        """str: unique identifier of the entity."""
        return self._entity

    @entity.setter
    def entity(self, entity):
        self._entity = entity

    @property
    def propertysetname(self):
        """str: name of the propertyset."""
        return self._propertysetname

    @propertysetname.setter
    def propertysetname(self, propertysetname):
        self._propertysetname = propertysetname

    def to_dict(self):
        """Return the model properties as a dict.

        Nested models (anything exposing ``to_dict``) are converted
        recursively, inside both lists and dicts.
        """
        # Rewritten from invalid decompiler lambda residue; six.iteritems
        # replaced with plain dict iteration (Python 3 only).
        result = {}
        for attr in self.attribute_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    k: v.to_dict() if hasattr(v, "to_dict") else v
                    for k, v in value.items()
                }
            else:
                result[attr] = value
        if issubclass(DeleteTimeseriesRequest, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Return True if both objects are equal."""
        if not isinstance(other, DeleteTimeseriesRequest):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Return True if both objects are not equal."""
        return not self == other
class Error(object):
    """Generic error model returned by the Time Series API.

    Attributes:
        attribute_types (dict): attribute name -> attribute type.
        attribute_map (dict): attribute name -> json key in the definition.
    """

    attribute_types = {"id": "str", "message": "str"}
    attribute_map = {"id": "id", "message": "message"}

    def __init__(self, id=None, message=None):
        self._id = id
        self._message = message
        self.discriminator = None

    @property
    def id(self):
        """str: error identifier of this Error."""
        return self._id

    @id.setter
    def id(self, id):
        self._id = id

    @property
    def message(self):
        """str: human-readable error message of this Error."""
        return self._message

    @message.setter
    def message(self, message):
        self._message = message

    def to_dict(self):
        """Return the model properties as a dict.

        Nested models (anything exposing ``to_dict``) are converted
        recursively, inside both lists and dicts.
        """
        # Rewritten from invalid decompiler lambda residue; six.iteritems
        # replaced with plain dict iteration (Python 3 only).
        result = {}
        for attr in self.attribute_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    k: v.to_dict() if hasattr(v, "to_dict") else v
                    for k, v in value.items()
                }
            else:
                result[attr] = value
        if issubclass(Error, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Return True if both objects are equal."""
        if not isinstance(other, Error):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Return True if both objects are not equal."""
        return not self == other
class GetTimeseriesRequest(object):
    """Request parameters for ``TimeSeriesClient.get_timeseries``.

    Attributes:
        attribute_types (dict): attribute name -> attribute type.
        attribute_map (dict): attribute name -> json key in the definition.
    """

    # NOTE(review): the decompiled source carried doubly quoted values
    # ('"str"', '"from"'); normalized to plain strings to match the sibling
    # models (Badrequest, Timeseries, ...).
    attribute_types = {
        "select": "str",
        "limit": "int",
        "_from": "str",
        "to": "str",
        "sort": "str",
        "entity": "str",
        "propertysetname": "str",
    }
    attribute_map = {
        "select": "select",
        "limit": "limit",
        "_from": "from",
        "to": "to",
        "sort": "sort",
        "entity": "entity",
        "propertysetname": "propertysetname",
    }

    def __init__(self, select=None, limit=None, _from=None, to=None,
                 sort=None, entity=None, propertysetname=None):
        self._select = select
        self._limit = limit
        # `from` is a Python keyword, hence the `_from` attribute name; the
        # double-underscore storage keeps the original name-mangled slot
        # (_GetTimeseriesRequest__from).
        self.__from = _from
        self._to = to
        self._sort = sort
        self._entity = entity
        self._propertysetname = propertysetname
        self.discriminator = None

    @property
    def select(self):
        """str: select fields to return."""
        return self._select

    @select.setter
    def select(self, select):
        self._select = select

    @property
    def limit(self):
        """int: maximum number of entries to read."""
        return self._limit

    @limit.setter
    def limit(self, limit):
        self._limit = limit

    @property
    def _from(self):
        """str: beginning of the time range to read (exclusive)."""
        return self.__from

    @_from.setter
    def _from(self, _from):
        self.__from = _from

    @property
    def to(self):
        """str: end of the time range to read (inclusive)."""
        return self._to

    @to.setter
    def to(self, to):
        self._to = to

    @property
    def sort(self):
        """str: sort order by time; permissible values are asc and desc."""
        return self._sort

    @sort.setter
    def sort(self, sort):
        self._sort = sort

    @property
    def entity(self):
        """str: unique identifier of the entity."""
        return self._entity

    @entity.setter
    def entity(self, entity):
        self._entity = entity

    @property
    def propertysetname(self):
        """str: name of the propertyset."""
        return self._propertysetname

    @propertysetname.setter
    def propertysetname(self, propertysetname):
        self._propertysetname = propertysetname

    def to_dict(self):
        """Return the model properties as a dict.

        Nested models (anything exposing ``to_dict``) are converted
        recursively, inside both lists and dicts.
        """
        # Rewritten from invalid decompiler lambda residue; six.iteritems
        # replaced with plain dict iteration (Python 3 only).
        result = {}
        for attr in self.attribute_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    k: v.to_dict() if hasattr(v, "to_dict") else v
                    for k, v in value.items()
                }
            else:
                result[attr] = value
        if issubclass(GetTimeseriesRequest, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Return True if both objects are equal."""
        if not isinstance(other, GetTimeseriesRequest):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Return True if both objects are not equal."""
        return not self == other
class Notfound(object):
    """Error model for a 404 Not Found response from the Time Series API.

    Attributes:
        attribute_types (dict): attribute name -> attribute type.
        attribute_map (dict): attribute name -> json key in the definition.
    """

    attribute_types = {"id": "str", "message": "str"}
    attribute_map = {"id": "id", "message": "message"}

    def __init__(self, id=None, message=None):
        self._id = id
        self._message = message
        self.discriminator = None

    @property
    def id(self):
        """str: error identifier of this Notfound."""
        return self._id

    @id.setter
    def id(self, id):
        self._id = id

    @property
    def message(self):
        """str: human-readable error message of this Notfound."""
        return self._message

    @message.setter
    def message(self, message):
        self._message = message

    def to_dict(self):
        """Return the model properties as a dict.

        Nested models (anything exposing ``to_dict``) are converted
        recursively, inside both lists and dicts.
        """
        # Rewritten from invalid decompiler lambda residue; six.iteritems
        # replaced with plain dict iteration (Python 3 only).
        result = {}
        for attr in self.attribute_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    k: v.to_dict() if hasattr(v, "to_dict") else v
                    for k, v in value.items()
                }
            else:
                result[attr] = value
        if issubclass(Notfound, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Return True if both objects are equal."""
        if not isinstance(other, Notfound):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Return True if both objects are not equal."""
        return not self == other
class PutTimeseriesRequest(object):
    """Request parameters for ``TimeSeriesClient.put_timeseries``.

    Attributes:
        attribute_types (dict): attribute name -> attribute type.
        attribute_map (dict): attribute name -> json key in the definition.
    """

    attribute_types = {
        "timeseries": "list[Timeseries]",
        "entity": "str",
        "propertysetname": "str",
    }
    attribute_map = {
        "timeseries": "timeseries",
        "entity": "entity",
        "propertysetname": "propertysetname",
    }

    def __init__(self, timeseries=None, entity=None, propertysetname=None):
        self._timeseries = timeseries
        self._entity = entity
        self._propertysetname = propertysetname
        self.discriminator = None

    @property
    def timeseries(self):
        """list[Timeseries]: time series data array (the request body)."""
        return self._timeseries

    @timeseries.setter
    def timeseries(self, timeseries):
        self._timeseries = timeseries

    @property
    def entity(self):
        """str: unique identifier of the entity."""
        return self._entity

    @entity.setter
    def entity(self, entity):
        self._entity = entity

    @property
    def propertysetname(self):
        """str: name of the propertyset."""
        return self._propertysetname

    @propertysetname.setter
    def propertysetname(self, propertysetname):
        self._propertysetname = propertysetname

    def to_dict(self):
        """Return the model properties as a dict.

        Nested models (anything exposing ``to_dict``) are converted
        recursively, inside both lists and dicts.
        """
        # Rewritten from invalid decompiler lambda residue; six.iteritems
        # replaced with plain dict iteration (Python 3 only).
        result = {}
        for attr in self.attribute_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    k: v.to_dict() if hasattr(v, "to_dict") else v
                    for k, v in value.items()
                }
            else:
                result[attr] = value
        if issubclass(PutTimeseriesRequest, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Return True if both objects are equal."""
        if not isinstance(other, PutTimeseriesRequest):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Return True if both objects are not equal."""
        return not self == other
class Timeseries(object):
    """A single time series record: a timestamp plus a map of field values.

    Attributes:
        attribute_types (dict): attribute name -> attribute type.
        attribute_map (dict): attribute name -> json key in the definition.
    """

    attribute_types = {"time": "str", "fields": "dict(str, object)"}
    # NOTE(review): `time` maps to json key "_time", unlike the sibling
    # models which map names verbatim — confirm against the API schema.
    attribute_map = {"time": "_time", "fields": "fields"}

    def __init__(self, time=None, fields=None):
        # Assigns storage directly, so a None `time` is allowed at
        # construction; the setter below rejects None on later assignment.
        self._time = time
        self._fields = fields
        self.discriminator = None

    @property
    def time(self):
        """str: timestamp of this record."""
        return self._time

    @time.setter
    def time(self, time):
        # `time` is required by the API; reject explicit None assignment.
        if time is None:
            raise MindsphereClientError("Invalid value for `time`, must not be `None`")
        self._time = time

    @property
    def fields(self):
        """dict(str, object): property name -> value for this timestamp."""
        return self._fields

    @fields.setter
    def fields(self, fields):
        self._fields = fields

    def to_dict(self):
        """Return the model properties as a dict.

        Nested models (anything exposing ``to_dict``) are converted
        recursively, inside both lists and dicts.
        """
        # Rewritten from invalid decompiler lambda residue; six.iteritems
        # replaced with plain dict iteration (Python 3 only).
        result = {}
        for attr in self.attribute_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    k: v.to_dict() if hasattr(v, "to_dict") else v
                    for k, v in value.items()
                }
            else:
                result[attr] = value
        if issubclass(Timeseries, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Return True if both objects are equal."""
        if not isinstance(other, Timeseries):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Return True if both objects are not equal."""
        return not self == other
class Toomanyrequests(object):
    """Error model returned when the API rate limit is exceeded.

    Attributes:
        attribute_types (dict): The key is attribute name and the value is
            attribute type.
        attribute_map (dict): The key is attribute name and the value is the
            json key in the definition.
    """
    attribute_types = {'id': "str",
     'message': "str"}
    attribute_map = {'id': "id",
     'message': "message"}

    def __init__(self, id=None, message=None):
        # `id` shadows the builtin, but the name is fixed by the generated
        # API surface and must not change.
        self._id = id
        self._message = message
        self.discriminator = None

    @property
    def id(self):
        """Gets the id of this Toomanyrequests.

        :return: The id of this Toomanyrequests.
        :rtype: str
        """
        return self._id

    @id.setter
    def id(self, id):
        """Sets the id of this Toomanyrequests.

        :param id: The id of this Toomanyrequests.
        :type: str
        """
        self._id = id

    @property
    def message(self):
        """Gets the message of this Toomanyrequests.

        :return: The message of this Toomanyrequests.
        :rtype: str
        """
        return self._message

    @message.setter
    def message(self, message):
        """Sets the message of this Toomanyrequests.

        :param message: The message of this Toomanyrequests.
        :type: str
        """
        self._message = message

    def to_dict(self):
        """Returns the model properties as a dict.

        Nested model objects (anything exposing ``to_dict``) are serialized
        recursively, both inside lists and inside dict values.
        """
        result = {}
        # Iterate the declared attributes; getattr routes through the properties.
        for attr in self.attribute_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                # NOTE: the decompiled original held a syntactically invalid
                # lambda here; restored to the standard generated form.
                result[attr] = [item.to_dict() if hasattr(item, "to_dict") else item
                                for item in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {key: val.to_dict() if hasattr(val, "to_dict") else val
                                for key, val in value.items()}
            else:
                result[attr] = value

        # Generated-model convention: merge dict items when the model
        # subclasses dict (it does not here; kept for template fidelity).
        if issubclass(Toomanyrequests, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, Toomanyrequests):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
class Unauthorized(object):
    """Error model returned when a request lacks valid authentication.

    Attributes:
        attribute_types (dict): The key is attribute name and the value is
            attribute type.
        attribute_map (dict): The key is attribute name and the value is the
            json key in the definition.
    """
    attribute_types = {'id': "str",
     'message': "str"}
    attribute_map = {'id': "id",
     'message': "message"}

    def __init__(self, id=None, message=None):
        # `id` shadows the builtin, but the name is fixed by the generated
        # API surface and must not change.
        self._id = id
        self._message = message
        self.discriminator = None

    @property
    def id(self):
        """Gets the id of this Unauthorized.

        :return: The id of this Unauthorized.
        :rtype: str
        """
        return self._id

    @id.setter
    def id(self, id):
        """Sets the id of this Unauthorized.

        :param id: The id of this Unauthorized.
        :type: str
        """
        self._id = id

    @property
    def message(self):
        """Gets the message of this Unauthorized.

        :return: The message of this Unauthorized.
        :rtype: str
        """
        return self._message

    @message.setter
    def message(self, message):
        """Sets the message of this Unauthorized.

        :param message: The message of this Unauthorized.
        :type: str
        """
        self._message = message

    def to_dict(self):
        """Returns the model properties as a dict.

        Nested model objects (anything exposing ``to_dict``) are serialized
        recursively, both inside lists and inside dict values.
        """
        result = {}
        # Iterate the declared attributes; getattr routes through the properties.
        for attr in self.attribute_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                # NOTE: the decompiled original held a syntactically invalid
                # lambda here; restored to the standard generated form.
                result[attr] = [item.to_dict() if hasattr(item, "to_dict") else item
                                for item in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {key: val.to_dict() if hasattr(val, "to_dict") else val
                                for key, val in value.items()}
            else:
                result[attr] = value

        # Generated-model convention: merge dict items when the model
        # subclasses dict (it does not here; kept for template fidelity).
        if issubclass(Unauthorized, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, Unauthorized):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
PEP_560: + GenericMeta = TypingMeta = type +else: + from typing import GenericMeta, TypingMeta +OLD_GENERICS = False +try: + from typing import _type_vars, _next_in_mro, _type_check +except ImportError: + OLD_GENERICS = True + +try: + from typing import _subs_tree + SUBS_TREE = True +except ImportError: + SUBS_TREE = False + +try: + from typing import _tp_cache +except ImportError: + + def _tp_cache(x): + return x + + +try: + from typing import _TypingEllipsis, _TypingEmpty +except ImportError: + + class _TypingEllipsis: + pass + + + class _TypingEmpty: + pass + + +def _no_slots_copy(dct): + dict_copy = dict(dct) + if "__slots__" in dict_copy: + for slot in dict_copy["__slots__"]: + dict_copy.pop(slot, None) + + return dict_copy + + +def _check_generic(cls, parameters): + if not cls.__parameters__: + raise TypeError("%s is not a generic class" % repr(cls)) + alen = len(parameters) + elen = len(cls.__parameters__) + if alen != elen: + raise TypeError("Too %s parameters for %s; actual %s, expected %s" % ( + "many" if alen > elen else "few", repr(cls), alen, elen)) + + +if hasattr(typing, "_generic_new"): + _generic_new = typing._generic_new +else: + + def _generic_new(base_cls, cls, *args, **kwargs): + return (base_cls.__new__)(cls, *args, **kwargs) + + +if hasattr(typing, "_geqv"): + from typing import _geqv + _geqv_defined = True +else: + _geqv = None + _geqv_defined = False +if sys.version_info[None[:2]] >= (3, 6): + import _collections_abc + _check_methods_in_mro = _collections_abc._check_methods +else: + + def _check_methods_in_mro(C, *methods): + mro = C.__mro__ + for method in methods: + for B in mro: + if method in B.__dict__: + if B.__dict__[method] is None: + return NotImplemented + break + else: + return NotImplemented + + return True + + +__all__ = [ + 'ClassVar', + 'Concatenate', + 'Final', + 'ParamSpec', + 'Type', + 'ContextManager', + 'Counter', + 'Deque', + 'DefaultDict', + 'OrderedDictTypedDict', + 'SupportsIndex', + 'final', + 'IntVar', + 'Literal', 
+ 'NewType', + 'overload', + 'Text', + 'TypeAlias', + 'TypeGuard', + 'TYPE_CHECKING'] +HAVE_ANNOTATED = PEP_560 or SUBS_TREE +if PEP_560: + __all__.extend(["get_args", "get_origin", "get_type_hints"]) +elif HAVE_ANNOTATED: + __all__.append("Annotated") +else: + HAVE_PROTOCOLS = sys.version_info[None[:3]] != (3, 5, 0) + if HAVE_PROTOCOLS: + __all__.extend(["Protocol", "runtime", "runtime_checkable"]) + else: + if hasattr(typing, "NoReturn"): + NoReturn = typing.NoReturn + else: + if hasattr(typing, "_FinalTypingBase"): + + class _NoReturn(typing._FinalTypingBase, _root=True): + __doc__ = "Special type indicating functions that never return.\n Example::\n\n from typing import NoReturn\n\n def stop() -> NoReturn:\n raise Exception('no way')\n\n This type is invalid in other positions, e.g., ``List[NoReturn]``\n will fail in static type checkers.\n " + __slots__ = () + + def __instancecheck__(self, obj): + raise TypeError("NoReturn cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("NoReturn cannot be used with issubclass().") + + + NoReturn = _NoReturn(_root=True) + else: + + class _NoReturnMeta(typing.TypingMeta): + __doc__ = "Metaclass for NoReturn" + + def __new__(cls, name, bases, namespace, _root=False): + return super().__new__(cls, name, bases, namespace, _root=_root) + + def __instancecheck__(self, obj): + raise TypeError("NoReturn cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("NoReturn cannot be used with issubclass().") + + + class NoReturn(typing.Final, metaclass=_NoReturnMeta, _root=True): + __doc__ = "Special type indicating functions that never return.\n Example::\n\n from typing import NoReturn\n\n def stop() -> NoReturn:\n raise Exception('no way')\n\n This type is invalid in other positions, e.g., ``List[NoReturn]``\n will fail in static type checkers.\n " + __slots__ = () + + + T = typing.TypeVar("T") + KT = typing.TypeVar("KT") + VT = typing.TypeVar("VT") + T_co = 
typing.TypeVar("T_co", covariant=True) + V_co = typing.TypeVar("V_co", covariant=True) + VT_co = typing.TypeVar("VT_co", covariant=True) + T_contra = typing.TypeVar("T_contra", contravariant=True) + if hasattr(typing, "ClassVar"): + ClassVar = typing.ClassVar + else: + if hasattr(typing, "_FinalTypingBase"): + + class _ClassVar(typing._FinalTypingBase, _root=True): + __doc__ = "Special type construct to mark class variables.\n\n An annotation wrapped in ClassVar indicates that a given\n attribute is intended to be used as a class variable and\n should not be set on instances of that class. Usage::\n\n class Starship:\n stats: ClassVar[Dict[str, int]] = {} # class variable\n damage: int = 10 # instance variable\n\n ClassVar accepts only types and cannot be further subscribed.\n\n Note that ClassVar is not a class itself, and should not\n be used with isinstance() or issubclass().\n " + __slots__ = ('__type__', ) + + def __init__(self, tp=None, **kwds): + self.__type__ = tp + + def __getitem__(self, item): + cls = type(self) + if self.__type__ is None: + return cls((typing._type_check(item, "{} accepts only single type.".format(cls.__name__[1[:None]]))), + _root=True) + raise TypeError("{} cannot be further subscripted".format(cls.__name__[1[:None]])) + + def _eval_type(self, globalns, localns): + new_tp = typing._eval_type(self.__type__, globalns, localns) + if new_tp == self.__type__: + return self + return type(self)(new_tp, _root=True) + + def __repr__(self): + r = super().__repr__() + if self.__type__ is not None: + r += "[{}]".format(typing._type_repr(self.__type__)) + return r + + def __hash__(self): + return hash((type(self).__name__, self.__type__)) + + def __eq__(self, other): + if not isinstance(other, _ClassVar): + return NotImplemented + if self.__type__ is not None: + return self.__type__ == other.__type__ + return self is other + + + ClassVar = _ClassVar(_root=True) + else: + + class _ClassVarMeta(typing.TypingMeta): + __doc__ = "Metaclass for 
ClassVar" + + def __new__(cls, name, bases, namespace, tp=None, _root=False): + self = super().__new__(cls, name, bases, namespace, _root=_root) + if tp is not None: + self.__type__ = tp + return self + + def __instancecheck__(self, obj): + raise TypeError("ClassVar cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("ClassVar cannot be used with issubclass().") + + def __getitem__(self, item): + cls = type(self) + if self.__type__ is not None: + raise TypeError("{} cannot be further subscripted".format(cls.__name__[1[:None]])) + param = typing._type_check(item, "{} accepts only single type.".format(cls.__name__[1[:None]])) + return cls((self.__name__), (self.__bases__), (dict(self.__dict__)), + tp=param, _root=True) + + def _eval_type(self, globalns, localns): + new_tp = typing._eval_type(self.__type__, globalns, localns) + if new_tp == self.__type__: + return self + return type(self)((self.__name__), (self.__bases__), (dict(self.__dict__)), + tp=(self.__type__), _root=True) + + def __repr__(self): + r = super().__repr__() + if self.__type__ is not None: + r += "[{}]".format(typing._type_repr(self.__type__)) + return r + + def __hash__(self): + return hash((type(self).__name__, self.__type__)) + + def __eq__(self, other): + if not isinstance(other, ClassVar): + return NotImplemented + if self.__type__ is not None: + return self.__type__ == other.__type__ + return self is other + + + class ClassVar(typing.Final, metaclass=_ClassVarMeta, _root=True): + __doc__ = "Special type construct to mark class variables.\n\n An annotation wrapped in ClassVar indicates that a given\n attribute is intended to be used as a class variable and\n should not be set on instances of that class. 
Usage::\n\n class Starship:\n stats: ClassVar[Dict[str, int]] = {} # class variable\n damage: int = 10 # instance variable\n\n ClassVar accepts only types and cannot be further subscribed.\n\n Note that ClassVar is not a class itself, and should not\n be used with isinstance() or issubclass().\n " + __type__ = None + + + if hasattr(typing, "Final"): + if sys.version_info[None[:2]] >= (3, 7): + Final = typing.Final + else: + if sys.version_info[None[:2]] >= (3, 7): + + class _FinalForm(typing._SpecialForm, _root=True): + + def __repr__(self): + return "typing_extensions." + self._name + + def __getitem__(self, parameters): + item = typing._type_check(parameters, "{} accepts only single type".format(self._name)) + return _GenericAlias(self, (item,)) + + + Final = _FinalForm("Final", doc="A special typing construct to indicate that a name\n cannot be re-assigned or overridden in a subclass.\n For example:\n\n MAX_SIZE: Final = 9000\n MAX_SIZE += 1 # Error reported by type checker\n\n class Connection:\n TIMEOUT: Final[int] = 10\n class FastConnector(Connection):\n TIMEOUT = 1 # Error reported by type checker\n\n There is no runtime checking of these properties.") + else: + if hasattr(typing, "_FinalTypingBase"): + + class _Final(typing._FinalTypingBase, _root=True): + __doc__ = "A special typing construct to indicate that a name\n cannot be re-assigned or overridden in a subclass.\n For example:\n\n MAX_SIZE: Final = 9000\n MAX_SIZE += 1 # Error reported by type checker\n\n class Connection:\n TIMEOUT: Final[int] = 10\n class FastConnector(Connection):\n TIMEOUT = 1 # Error reported by type checker\n\n There is no runtime checking of these properties.\n " + __slots__ = ('__type__', ) + + def __init__(self, tp=None, **kwds): + self.__type__ = tp + + def __getitem__(self, item): + cls = type(self) + if self.__type__ is None: + return cls((typing._type_check(item, "{} accepts only single type.".format(cls.__name__[1[:None]]))), + _root=True) + raise TypeError("{} cannot 
be further subscripted".format(cls.__name__[1[:None]])) + + def _eval_type(self, globalns, localns): + new_tp = typing._eval_type(self.__type__, globalns, localns) + if new_tp == self.__type__: + return self + return type(self)(new_tp, _root=True) + + def __repr__(self): + r = super().__repr__() + if self.__type__ is not None: + r += "[{}]".format(typing._type_repr(self.__type__)) + return r + + def __hash__(self): + return hash((type(self).__name__, self.__type__)) + + def __eq__(self, other): + if not isinstance(other, _Final): + return NotImplemented + if self.__type__ is not None: + return self.__type__ == other.__type__ + return self is other + + + Final = _Final(_root=True) + else: + + class _FinalMeta(typing.TypingMeta): + __doc__ = "Metaclass for Final" + + def __new__(cls, name, bases, namespace, tp=None, _root=False): + self = super().__new__(cls, name, bases, namespace, _root=_root) + if tp is not None: + self.__type__ = tp + return self + + def __instancecheck__(self, obj): + raise TypeError("Final cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Final cannot be used with issubclass().") + + def __getitem__(self, item): + cls = type(self) + if self.__type__ is not None: + raise TypeError("{} cannot be further subscripted".format(cls.__name__[1[:None]])) + param = typing._type_check(item, "{} accepts only single type.".format(cls.__name__[1[:None]])) + return cls((self.__name__), (self.__bases__), (dict(self.__dict__)), + tp=param, _root=True) + + def _eval_type(self, globalns, localns): + new_tp = typing._eval_type(self.__type__, globalns, localns) + if new_tp == self.__type__: + return self + return type(self)((self.__name__), (self.__bases__), (dict(self.__dict__)), + tp=(self.__type__), _root=True) + + def __repr__(self): + r = super().__repr__() + if self.__type__ is not None: + r += "[{}]".format(typing._type_repr(self.__type__)) + return r + + def __hash__(self): + return hash((type(self).__name__, 
self.__type__)) + + def __eq__(self, other): + if not isinstance(other, Final): + return NotImplemented + if self.__type__ is not None: + return self.__type__ == other.__type__ + return self is other + + + class Final(typing.Final, metaclass=_FinalMeta, _root=True): + __doc__ = "A special typing construct to indicate that a name\n cannot be re-assigned or overridden in a subclass.\n For example:\n\n MAX_SIZE: Final = 9000\n MAX_SIZE += 1 # Error reported by type checker\n\n class Connection:\n TIMEOUT: Final[int] = 10\n class FastConnector(Connection):\n TIMEOUT = 1 # Error reported by type checker\n\n There is no runtime checking of these properties.\n " + __type__ = None + + + if hasattr(typing, "final"): + final = typing.final + else: + + def final(f): + """This decorator can be used to indicate to type checkers that + the decorated method cannot be overridden, and decorated class + cannot be subclassed. For example: + + class Base: + @final + def done(self) -> None: + ... + class Sub(Base): + def done(self) -> None: # Error reported by type checker + ... + @final + class Leaf: + ... + class Other(Leaf): # Error reported by type checker + ... + + There is no runtime checking of these properties. + """ + return f + + + def IntVar(name): + return TypeVar(name) + + + if hasattr(typing, "Literal"): + Literal = typing.Literal + else: + if sys.version_info[None[:2]] >= (3, 7): + + class _LiteralForm(typing._SpecialForm, _root=True): + + def __repr__(self): + return "typing_extensions." + self._name + + def __getitem__(self, parameters): + return _GenericAlias(self, parameters) + + + Literal = _LiteralForm("Literal", doc="A type that can be used to indicate to type checkers\n that the corresponding value has a value literally equivalent\n to the provided parameter. For example:\n\n var: Literal[4] = 4\n\n The type checker understands that 'var' is literally equal to\n the value 4 and no other value.\n\n Literal[...] cannot be subclassed. 
There is no runtime\n checking verifying that the parameter is actually a value\n instead of a type.") + else: + if hasattr(typing, "_FinalTypingBase"): + + class _Literal(typing._FinalTypingBase, _root=True): + __doc__ = "A type that can be used to indicate to type checkers that the\n corresponding value has a value literally equivalent to the\n provided parameter. For example:\n\n var: Literal[4] = 4\n\n The type checker understands that 'var' is literally equal to the\n value 4 and no other value.\n\n Literal[...] cannot be subclassed. There is no runtime checking\n verifying that the parameter is actually a value instead of a type.\n " + __slots__ = ('__values__', ) + + def __init__(self, values=None, **kwds): + self.__values__ = values + + def __getitem__(self, values): + cls = type(self) + if self.__values__ is None: + if not isinstance(values, tuple): + values = ( + values,) + return cls(values, _root=True) + raise TypeError("{} cannot be further subscripted".format(cls.__name__[1[:None]])) + + def _eval_type(self, globalns, localns): + return self + + def __repr__(self): + r = super().__repr__() + if self.__values__ is not None: + r += "[{}]".format(", ".join(map(typing._type_repr, self.__values__))) + return r + + def __hash__(self): + return hash((type(self).__name__, self.__values__)) + + def __eq__(self, other): + if not isinstance(other, _Literal): + return NotImplemented + if self.__values__ is not None: + return self.__values__ == other.__values__ + return self is other + + + Literal = _Literal(_root=True) + else: + + class _LiteralMeta(typing.TypingMeta): + __doc__ = "Metaclass for Literal" + + def __new__(cls, name, bases, namespace, values=None, _root=False): + self = super().__new__(cls, name, bases, namespace, _root=_root) + if values is not None: + self.__values__ = values + return self + + def __instancecheck__(self, obj): + raise TypeError("Literal cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise 
TypeError("Literal cannot be used with issubclass().") + + def __getitem__(self, item): + cls = type(self) + if self.__values__ is not None: + raise TypeError("{} cannot be further subscripted".format(cls.__name__[1[:None]])) + if not isinstance(item, tuple): + item = ( + item,) + return cls((self.__name__), (self.__bases__), (dict(self.__dict__)), + values=item, _root=True) + + def _eval_type(self, globalns, localns): + return self + + def __repr__(self): + r = super().__repr__() + if self.__values__ is not None: + r += "[{}]".format(", ".join(map(typing._type_repr, self.__values__))) + return r + + def __hash__(self): + return hash((type(self).__name__, self.__values__)) + + def __eq__(self, other): + if not isinstance(other, Literal): + return NotImplemented + if self.__values__ is not None: + return self.__values__ == other.__values__ + return self is other + + + class Literal(typing.Final, metaclass=_LiteralMeta, _root=True): + __doc__ = "A type that can be used to indicate to type checkers that the\n corresponding value has a value literally equivalent to the\n provided parameter. For example:\n\n var: Literal[4] = 4\n\n The type checker understands that 'var' is literally equal to the\n value 4 and no other value.\n\n Literal[...] cannot be subclassed. There is no runtime checking\n verifying that the parameter is actually a value instead of a type.\n " + __values__ = None + + + def _overload_dummy(*args, **kwds): + """Helper for @overload to raise when called.""" + raise NotImplementedError("You should not call an overloaded function. A series of @overload-decorated functions outside a stub module should always be followed by an implementation that is not @overload-ed.") + + + def overload(func): + """Decorator for overloaded functions/methods. + + In a stub file, place two or more stub definitions for the same + function in a row, each decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... 
+ @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + + In a non-stub file (i.e. a regular .py file), do the same but + follow it with an implementation. The implementation should *not* + be decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + def utf8(value): + # implementation goes here + """ + return _overload_dummy + + + if hasattr(typing, "Type"): + Type = typing.Type + else: + CT_co = typing.TypeVar("CT_co", covariant=True, bound=type) + + class Type(typing.Generic[CT_co], extra=type): + __doc__ = "A special construct usable to annotate class objects.\n\n For example, suppose we have the following classes::\n\n class User: ... # Abstract base for User classes\n class BasicUser(User): ...\n class ProUser(User): ...\n class TeamUser(User): ...\n\n And a function that takes a class argument that's a subclass of\n User and returns an instance of the corresponding class::\n\n U = TypeVar('U', bound=User)\n def new_user(user_class: Type[U]) -> U:\n user = user_class()\n # (Here we could write the user object to a database)\n return user\n joe = new_user(BasicUser)\n\n At this point the type checker knows that joe has type BasicUser.\n " + __slots__ = () + + + def _define_guard(type_name): + """ + Returns True if the given type isn't defined in typing but + is defined in collections_abc. + + Adds the type to __all__ if the collection is found in either + typing or collection_abc. 
+ """ + if hasattr(typing, type_name): + __all__.append(type_name) + globals()[type_name] = getattr(typing, type_name) + return False + if hasattr(collections_abc, type_name): + __all__.append(type_name) + return True + return False + + + class _ExtensionsGenericMeta(GenericMeta): + + def __subclasscheck__(self, subclass): + """This mimics a more modern GenericMeta.__subclasscheck__() logic + (that does not have problems with recursion) to work around interactions + between collections, typing, and typing_extensions on older + versions of Python, see https://github.com/python/typing/issues/501. + """ + if sys.version_info[None[:3]] >= (3, 5, 3) or sys.version_info[None[:3]] < (3, + 5, + 0): + if self.__origin__ is not None: + if sys._getframe(1).f_globals["__name__"] not in ('abc', 'functools'): + raise TypeError("Parameterized generics cannot be used with class or instance checks") + return False + else: + return self.__extra__ or super().__subclasscheck__(subclass) + res = self.__extra__.__subclasshook__(subclass) + if res is not NotImplemented: + return res + if self.__extra__ in subclass.__mro__: + return True + for scls in self.__extra__.__subclasses__(): + if isinstance(scls, GenericMeta): + continue + if issubclass(subclass, scls): + return True + + return False + + + if _define_guard("Awaitable"): + + class Awaitable(typing.Generic[T_co], metaclass=_ExtensionsGenericMeta, extra=collections_abc.Awaitable): + __slots__ = () + + + elif _define_guard("Coroutine"): + + class Coroutine(Awaitable[V_co], typing.Generic[(T_co, T_contra, V_co)], metaclass=_ExtensionsGenericMeta, extra=collections_abc.Coroutine): + __slots__ = () + + + elif _define_guard("AsyncIterable"): + + class AsyncIterable(typing.Generic[T_co], metaclass=_ExtensionsGenericMeta, extra=collections_abc.AsyncIterable): + __slots__ = () + + + if _define_guard("AsyncIterator"): + + class AsyncIterator(AsyncIterable[T_co], metaclass=_ExtensionsGenericMeta, extra=collections_abc.AsyncIterator): + 
__slots__ = () + + + if hasattr(typing, "Deque"): + Deque = typing.Deque + else: + if _geqv_defined: + + class Deque(collections.deque, typing.MutableSequence[T], metaclass=_ExtensionsGenericMeta, extra=collections.deque): + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Deque): + return (collections.deque)(*args, **kwds) + return _generic_new(collections.deque, cls, *args, **kwds) + + + else: + + class Deque(collections.deque, typing.MutableSequence[T], metaclass=_ExtensionsGenericMeta, extra=collections.deque): + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is Deque: + return (collections.deque)(*args, **kwds) + return _generic_new(collections.deque, cls, *args, **kwds) + + + if hasattr(typing, "ContextManager"): + ContextManager = typing.ContextManager + elif hasattr(contextlib, "AbstractContextManager"): + + class ContextManager(typing.Generic[T_co], metaclass=_ExtensionsGenericMeta, extra=contextlib.AbstractContextManager): + __slots__ = () + + + else: + + class ContextManager(typing.Generic[T_co]): + __slots__ = () + + def __enter__(self): + return self + + @abc.abstractmethod + def __exit__(self, exc_type, exc_value, traceback): + pass + + @classmethod + def __subclasshook__(cls, C): + if cls is ContextManager: + if any(("__enter__" in B.__dict__ for B in C.__mro__)): + if any(("__exit__" in B.__dict__ for B in C.__mro__)): + return True + return NotImplemented + + +if hasattr(typing, "AsyncContextManager"): + AsyncContextManager = typing.AsyncContextManager + __all__.append("AsyncContextManager") +else: + if hasattr(contextlib, "AbstractAsyncContextManager"): + + class AsyncContextManager(typing.Generic[T_co], metaclass=_ExtensionsGenericMeta, extra=contextlib.AbstractAsyncContextManager): + __slots__ = () + + + __all__.append("AsyncContextManager") + else: + if sys.version_info[None[:2]] >= (3, 5): + exec('\nclass AsyncContextManager(typing.Generic[T_co]):\n __slots__ = ()\n\n async def __aenter__(self):\n return 
self\n\n @abc.abstractmethod\n async def __aexit__(self, exc_type, exc_value, traceback):\n return None\n\n @classmethod\n def __subclasshook__(cls, C):\n if cls is AsyncContextManager:\n return _check_methods_in_mro(C, "__aenter__", "__aexit__")\n return NotImplemented\n\n__all__.append(\'AsyncContextManager\')\n') + else: + if hasattr(typing, "DefaultDict"): + DefaultDict = typing.DefaultDict + else: + if _geqv_defined: + + class DefaultDict(collections.defaultdict, typing.MutableMapping[(KT, VT)], metaclass=_ExtensionsGenericMeta, extra=collections.defaultdict): + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, DefaultDict): + return (collections.defaultdict)(*args, **kwds) + return _generic_new(collections.defaultdict, cls, *args, **kwds) + + + else: + + class DefaultDict(collections.defaultdict, typing.MutableMapping[(KT, VT)], metaclass=_ExtensionsGenericMeta, extra=collections.defaultdict): + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is DefaultDict: + return (collections.defaultdict)(*args, **kwds) + return _generic_new(collections.defaultdict, cls, *args, **kwds) + + + if hasattr(typing, "OrderedDict"): + OrderedDict = typing.OrderedDict + else: + if (3, 7, 0) <= sys.version_info[None[:3]] < (3, 7, 2): + OrderedDict = typing._alias(collections.OrderedDict, (KT, VT)) + else: + if _geqv_defined: + + class OrderedDict(collections.OrderedDict, typing.MutableMapping[(KT, VT)], metaclass=_ExtensionsGenericMeta, extra=collections.OrderedDict): + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, OrderedDict): + return (collections.OrderedDict)(*args, **kwds) + return _generic_new(collections.OrderedDict, cls, *args, **kwds) + + + else: + + class OrderedDict(collections.OrderedDict, typing.MutableMapping[(KT, VT)], metaclass=_ExtensionsGenericMeta, extra=collections.OrderedDict): + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is OrderedDict: + return (collections.OrderedDict)(*args, 
**kwds) + return _generic_new(collections.OrderedDict, cls, *args, **kwds) + + + if hasattr(typing, "Counter"): + Counter = typing.Counter + else: + if (3, 5, 0) <= sys.version_info[None[:3]] <= (3, 5, 1): + assert _geqv_defined + _TInt = typing.TypeVar("_TInt") + + class _CounterMeta(typing.GenericMeta): + __doc__ = "Metaclass for Counter" + + def __getitem__(self, item): + return super().__getitem__((item, int)) + + + class Counter(collections.Counter, typing.Dict[(T, int)], metaclass=_CounterMeta, extra=collections.Counter): + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Counter): + return (collections.Counter)(*args, **kwds) + return _generic_new(collections.Counter, cls, *args, **kwds) + + + else: + if _geqv_defined: + + class Counter(collections.Counter, typing.Dict[(T, int)], metaclass=_ExtensionsGenericMeta, extra=collections.Counter): + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Counter): + return (collections.Counter)(*args, **kwds) + return _generic_new(collections.Counter, cls, *args, **kwds) + + + else: + + class Counter(collections.Counter, typing.Dict[(T, int)], metaclass=_ExtensionsGenericMeta, extra=collections.Counter): + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is Counter: + return (collections.Counter)(*args, **kwds) + return _generic_new(collections.Counter, cls, *args, **kwds) + + +if hasattr(typing, "ChainMap"): + ChainMap = typing.ChainMap + __all__.append("ChainMap") +else: + if hasattr(collections, "ChainMap"): + if _geqv_defined: + + class ChainMap(collections.ChainMap, typing.MutableMapping[(KT, VT)], metaclass=_ExtensionsGenericMeta, extra=collections.ChainMap): + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, ChainMap): + return (collections.ChainMap)(*args, **kwds) + return _generic_new(collections.ChainMap, cls, *args, **kwds) + + + else: + + class ChainMap(collections.ChainMap, typing.MutableMapping[(KT, VT)], 
metaclass=_ExtensionsGenericMeta, extra=collections.ChainMap): + __slots__ = () + + def __new__(cls, *args, **kwds): + if cls._gorg is ChainMap: + return (collections.ChainMap)(*args, **kwds) + return _generic_new(collections.ChainMap, cls, *args, **kwds) + + + __all__.append("ChainMap") + elif _define_guard("AsyncGenerator"): + + class AsyncGenerator(AsyncIterator[T_co], typing.Generic[(T_co, T_contra)], metaclass=_ExtensionsGenericMeta, extra=collections_abc.AsyncGenerator): + __slots__ = () + + + else: + if hasattr(typing, "NewType"): + NewType = typing.NewType + else: + + def NewType(name, tp): + """NewType creates simple unique types with almost zero + runtime overhead. NewType(name, tp) is considered a subtype of tp + by static type checkers. At runtime, NewType(name, tp) returns + a dummy function that simply returns its argument. Usage:: + + UserId = NewType('UserId', int) + + def name_by_id(user_id: UserId) -> str: + ... + + UserId('user') # Fails type check + + name_by_id(42) # Fails type check + name_by_id(UserId(42)) # OK + + num = UserId(5) + 1 # type: int + """ + + def new_type(x): + return x + + new_type.__name__ = name + new_type.__supertype__ = tp + return new_type + + + if hasattr(typing, "Text"): + Text = typing.Text + else: + Text = str + if hasattr(typing, "TYPE_CHECKING"): + TYPE_CHECKING = typing.TYPE_CHECKING + else: + TYPE_CHECKING = False + + def _gorg(cls): + """This function exists for compatibility with old typing versions.""" + assert isinstance(cls, GenericMeta) + if hasattr(cls, "_gorg"): + return cls._gorg + while cls.__origin__ is not None: + cls = cls.__origin__ + + return cls + + + if OLD_GENERICS: + + def _next_in_mro(cls): + """This function exists for compatibility with old typing versions.""" + next_in_mro = object + for i, c in enumerate(cls.__mro__[None[:-1]]): + if isinstance(c, GenericMeta) and _gorg(c) is Generic: + next_in_mro = cls.__mro__[i + 1] + + return next_in_mro + + + _PROTO_WHITELIST = [ + 'Callable', 
'Awaitable', + 'Iterable', + 'Iterator', 'AsyncIterable', 'AsyncIterator', + 'Hashable', + 'Sized', 'Container', 'Collection', 'Reversible', + 'ContextManager', + 'AsyncContextManager'] + + def _get_protocol_attrs(cls): + attrs = set() + for base in cls.__mro__[None[:-1]]: + if base.__name__ in ('Protocol', 'Generic'): + continue + annotations = getattr(base, "__annotations__", {}) + for attr in list(base.__dict__.keys()) + list(annotations.keys()): + if attr.startswith("_abc_") or attr not in ('__abstractmethods__', + '__annotations__', + '__weakref__', '_is_protocol', + '_is_runtime_protocol', + '__dict__', '__args__', + '__slots__', '__next_in_mro__', + '__parameters__', + '__origin__', '__orig_bases__', + '__extra__', '__tree_hash__', + '__doc__', '__subclasshook__', + '__init__', '__new__', + '__module__', '_MutableMapping__marker', + '_gorg'): + attrs.add(attr) + + return attrs + + + def _is_callable_members_only(cls): + return all((callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls))) + + + if hasattr(typing, "Protocol"): + Protocol = typing.Protocol + else: + if HAVE_PROTOCOLS and not PEP_560: + + def _no_init(self, *args, **kwargs): + if type(self)._is_protocol: + raise TypeError("Protocols cannot be instantiated") + + + class _ProtocolMeta(GenericMeta): + __doc__ = "Internal metaclass for Protocol.\n\n This exists so Protocol classes can be generic without deriving\n from Generic.\n " + if not OLD_GENERICS: + + def __new__(cls, name, bases, namespace, tvars=None, args=None, origin=None, extra=None, orig_bases=None): + if not extra is None: + raise AssertionError + else: + if tvars is not None: + assert origin is not None + assert all((isinstance(t, TypeVar) for t in tvars)), tvars + else: + tvars = _type_vars(bases) + gvars = None + for base in bases: + if base is Generic: + raise TypeError("Cannot inherit from plain Generic") + if isinstance(base, GenericMeta): + if base.__origin__ in (Generic, Protocol): + if gvars is not None: + 
raise TypeError("Cannot inherit from Generic[...] or Protocol[...] multiple times.") + gvars = base.__parameters__ + + if gvars is None: + gvars = tvars + else: + tvarset = set(tvars) + gvarset = set(gvars) + if not tvarset <= gvarset: + raise TypeError("Some type variables (%s) are not listed in %s[%s]" % ( + ", ".join((str(t) for t in tvars if t not in gvarset)), + "Generic" if any((b.__origin__ is Generic for b in bases)) else "Protocol", + ", ".join((str(g) for g in gvars)))) + tvars = gvars + initial_bases = bases + if extra is not None: + if type(extra) is abc.ABCMeta and extra not in bases: + bases = ( + extra,) + bases + bases = tuple((_gorg(b) if isinstance(b, GenericMeta) else b for b in bases)) + if any((isinstance(b, GenericMeta) and b is not Generic for b in bases)): + bases = tuple((b for b in bases if b is not Generic)) + namespace.update({'__origin__':origin, '__extra__':extra}) + self = super(GenericMeta, cls).__new__(cls, name, bases, namespace, _root=True) + super(GenericMeta, self).__setattr__("_gorg", self if not origin else _gorg(origin)) + self.__parameters__ = tvars + self.__args__ = tuple((... 
if a is _TypingEllipsis else () if a is _TypingEmpty else a for a in args)) if args else None + self.__next_in_mro__ = _next_in_mro(self) + if orig_bases is None: + self.__orig_bases__ = initial_bases + else: + if origin is not None: + self._abc_registry = origin._abc_registry + self._abc_cache = origin._abc_cache + if hasattr(self, "_subs_tree"): + self.__tree_hash__ = hash(self._subs_tree()) if origin else super(GenericMeta, self).__hash__() + return self + + def __init__(cls, *args, **kwargs): + (super().__init__)(*args, **kwargs) + if not cls.__dict__.get("_is_protocol", None): + cls._is_protocol = any((b is Protocol or isinstance(b, _ProtocolMeta) and b.__origin__ is Protocol for b in cls.__bases__)) + if cls._is_protocol: + for base in cls.__mro__[1[:None]]: + if base in (object, Generic) or base.__module__ == "collections.abc": + raise base.__name__ in _PROTO_WHITELIST or isinstance(base, TypingMeta) and base._is_protocol or isinstance(base, GenericMeta) and base.__origin__ is Generic or TypeError("Protocols can only inherit from other protocols, got %r" % base) + + cls.__init__ = _no_init + + def _proto_hook(other): + if not cls.__dict__.get("_is_protocol", None): + return NotImplemented + if not isinstance(other, type): + raise TypeError("issubclass() arg 1 must be a class") + for attr in _get_protocol_attrs(cls): + for base in other.__mro__: + if attr in base.__dict__: + if base.__dict__[attr] is None: + return NotImplemented + break + annotations = getattr(base, "__annotations__", {}) + if isinstance(annotations, typing.Mapping) and attr in annotations and isinstance(other, _ProtocolMeta) and other._is_protocol: + break + else: + return NotImplemented + + return True + + if "__subclasshook__" not in cls.__dict__: + cls.__subclasshook__ = _proto_hook + + def __instancecheck__(self, instance): + if not getattr(self, "_is_protocol", False) or _is_callable_members_only(self): + if issubclass(instance.__class__, self): + return True + elif self._is_protocol 
and all((hasattr(instance, attr) and (not callable(getattr(self, attr, None)) or getattr(instance, attr) is not None) for attr in _get_protocol_attrs(self))): + return True + return super(GenericMeta, self).__instancecheck__(instance) + + def __subclasscheck__(self, cls): + if self.__origin__ is not None: + if sys._getframe(1).f_globals["__name__"] not in ('abc', + 'functools'): + raise TypeError("Parameterized generics cannot be used with class or instance checks") + else: + return False + if self.__dict__.get("_is_protocol", None): + if not self.__dict__.get("_is_runtime_protocol", None): + if sys._getframe(1).f_globals["__name__"] in ('abc', + 'functools', + 'typing'): + return False + raise TypeError("Instance and class checks can only be used with @runtime protocols") + if self.__dict__.get("_is_runtime_protocol", None) and not _is_callable_members_only(self): + if sys._getframe(1).f_globals["__name__"] in ('abc', + 'functools', + 'typing'): + return super(GenericMeta, self).__subclasscheck__(cls) + raise TypeError("Protocols with non-method members don't support issubclass()") + return super(GenericMeta, self).__subclasscheck__(cls) + + if not OLD_GENERICS: + + @_tp_cache + def __getitem__(self, params): + if not isinstance(params, tuple): + params = ( + params,) + elif not params: + if _gorg(self) is not Tuple: + raise TypeError("Parameter list to %s[...] cannot be empty" % self.__qualname__) + else: + msg = "Parameters to generic types must be types." + params = tuple((_type_check(p, msg) for p in params)) + if self in (Generic, Protocol): + if not all((isinstance(p, TypeVar) for p in params)): + raise TypeError("Parameters to %r[...] must all be type variables" % self) + if len(set(params)) != len(params): + raise TypeError("Parameters to %r[...] 
must all be unique" % self) + tvars = params + args = params + else: + if self in (Tuple, Callable): + tvars = _type_vars(params) + args = params + else: + if self.__origin__ in (Generic, Protocol): + raise TypeError("Cannot subscript already-subscripted %s" % repr(self)) + else: + _check_generic(self, params) + tvars = _type_vars(params) + args = params + prepend = (self,) if self.__origin__ is None else () + return self.__class__((self.__name__), (prepend + self.__bases__), + (_no_slots_copy(self.__dict__)), + tvars=tvars, + args=args, + origin=self, + extra=(self.__extra__), + orig_bases=(self.__orig_bases__)) + + + class Protocol(metaclass=_ProtocolMeta): + __doc__ = "Base class for protocol classes. Protocol classes are defined as::\n\n class Proto(Protocol):\n def meth(self) -> int:\n ...\n\n Such classes are primarily used with static type checkers that recognize\n structural subtyping (static duck-typing), for example::\n\n class C:\n def meth(self) -> int:\n return 0\n\n def func(x: Proto) -> int:\n return x.meth()\n\n func(C()) # Passes static type check\n\n See PEP 544 for details. 
Protocol classes decorated with\n @typing_extensions.runtime act as simple-minded runtime protocol that checks\n only the presence of given attributes, ignoring their type signatures.\n\n Protocol classes can be generic, they are defined as::\n\n class GenProto({bases}):\n def meth(self) -> T:\n ...\n " + __slots__ = () + _is_protocol = True + + def __new__(cls, *args, **kwds): + if _gorg(cls) is Protocol: + raise TypeError("Type Protocol cannot be instantiated; it can be used only as a base class") + if OLD_GENERICS: + return _generic_new(_next_in_mro(cls), cls, *args, **kwds) + return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) + + + if Protocol.__doc__ is not None: + Protocol.__doc__ = Protocol.__doc__.format(bases=("Protocol, Generic[T]" if OLD_GENERICS else "Protocol[T]")) + else: + if PEP_560: + from typing import _type_check, _GenericAlias, _collect_type_vars + + def _no_init(self, *args, **kwargs): + if type(self)._is_protocol: + raise TypeError("Protocols cannot be instantiated") + + + class _ProtocolMeta(abc.ABCMeta): + + def __instancecheck__(cls, instance): + if not getattr(cls, "_is_protocol", False) or _is_callable_members_only(cls): + if issubclass(instance.__class__, cls): + return True + elif cls._is_protocol and all((hasattr(instance, attr) and (not callable(getattr(cls, attr, None)) or getattr(instance, attr) is not None) for attr in _get_protocol_attrs(cls))): + return True + return super().__instancecheck__(instance) + + + class Protocol(metaclass=_ProtocolMeta): + __doc__ = "Base class for protocol classes. Protocol classes are defined as::\n\n class Proto(Protocol):\n def meth(self) -> int:\n ...\n\n Such classes are primarily used with static type checkers that recognize\n structural subtyping (static duck-typing), for example::\n\n class C:\n def meth(self) -> int:\n return 0\n\n def func(x: Proto) -> int:\n return x.meth()\n\n func(C()) # Passes static type check\n\n See PEP 544 for details. 
Protocol classes decorated with\n @typing_extensions.runtime act as simple-minded runtime protocol that checks\n only the presence of given attributes, ignoring their type signatures.\n\n Protocol classes can be generic, they are defined as::\n\n class GenProto(Protocol[T]):\n def meth(self) -> T:\n ...\n " + __slots__ = () + _is_protocol = True + + def __new__(cls, *args, **kwds): + if cls is Protocol: + raise TypeError("Type Protocol cannot be instantiated; it can only be used as a base class") + return super().__new__(cls) + + @_tp_cache + def __class_getitem__(cls, params): + if not isinstance(params, tuple): + params = ( + params,) + elif not params: + if cls is not Tuple: + raise TypeError("Parameter list to {}[...] cannot be empty".format(cls.__qualname__)) + else: + msg = "Parameters to generic types must be types." + params = tuple((_type_check(p, msg) for p in params)) + if cls is Protocol: + if not all((isinstance(p, TypeVar) for p in params)): + i = 0 + while isinstance(params[i], TypeVar): + i += 1 + + raise TypeError("Parameters to Protocol[...] must all be type variables. Parameter {} is {}".format(i + 1, params[i])) + if len(set(params)) != len(params): + raise TypeError("Parameters to Protocol[...] must all be unique") + else: + _check_generic(cls, params) + return _GenericAlias(cls, params) + + def __init_subclass__(cls, *args, **kwargs): + tvars = [] + if "__orig_bases__" in cls.__dict__: + error = Generic in cls.__orig_bases__ + else: + error = Generic in cls.__bases__ + if error: + raise TypeError("Cannot inherit from plain Generic") + if "__orig_bases__" in cls.__dict__: + tvars = _collect_type_vars(cls.__orig_bases__) + gvars = None + for base in cls.__orig_bases__: + if isinstance(base, _GenericAlias): + if base.__origin__ in (Generic, Protocol): + the_base = "Generic" if base.__origin__ is Generic else "Protocol" + if gvars is not None: + raise TypeError("Cannot inherit from Generic[...] and/or Protocol[...] 
multiple types.") + gvars = base.__parameters__ + + if gvars is None: + gvars = tvars + else: + tvarset = set(tvars) + gvarset = set(gvars) + if not tvarset <= gvarset: + s_vars = ", ".join((str(t) for t in tvars if t not in gvarset)) + s_args = ", ".join((str(g) for g in gvars)) + raise TypeError("Some type variables ({}) are not listed in {}[{}]".format(s_vars, the_base, s_args)) + tvars = gvars + else: + cls.__parameters__ = tuple(tvars) + if not cls.__dict__.get("_is_protocol", None): + cls._is_protocol = any((b is Protocol for b in cls.__bases__)) + + def _proto_hook(other): + if not cls.__dict__.get("_is_protocol", None): + return NotImplemented + elif not getattr(cls, "_is_runtime_protocol", False): + if sys._getframe(2).f_globals["__name__"] in ('abc', + 'functools'): + return NotImplemented + raise TypeError("Instance and class checks can only be used with @runtime protocols") + if not _is_callable_members_only(cls): + if sys._getframe(2).f_globals["__name__"] in ('abc', + 'functools'): + return NotImplemented + raise TypeError("Protocols with non-method members don't support issubclass()") + assert isinstance(other, type), "issubclass() arg 1 must be a class" + for attr in _get_protocol_attrs(cls): + for base in other.__mro__: + if attr in base.__dict__: + if base.__dict__[attr] is None: + return NotImplemented + break + annotations = getattr(base, "__annotations__", {}) + if isinstance(annotations, typing.Mapping) and attr in annotations and isinstance(other, _ProtocolMeta) and other._is_protocol: + break + else: + return NotImplemented + + return True + + if "__subclasshook__" not in cls.__dict__: + cls.__subclasshook__ = _proto_hook + return cls._is_protocol or None + for base in cls.__bases__: + if base in (object, Generic) or base.__module__ == "collections.abc" and base.__name__ in _PROTO_WHITELIST or isinstance(base, _ProtocolMeta): + raise base._is_protocol or TypeError("Protocols can only inherit from other protocols, got %r" % base) + + 
cls.__init__ = _no_init + + +if hasattr(typing, "runtime_checkable"): + runtime_checkable = typing.runtime_checkable +else: + if HAVE_PROTOCOLS: + + def runtime_checkable(cls): + """Mark a protocol class as a runtime protocol, so that it + can be used with isinstance() and issubclass(). Raise TypeError + if applied to a non-protocol class. + + This allows a simple-minded structural check very similar to the + one-offs in collections.abc such as Hashable. + """ + if not (isinstance(cls, _ProtocolMeta) and cls._is_protocol): + raise TypeError("@runtime_checkable can be only applied to protocol classes, got %r" % cls) + cls._is_runtime_protocol = True + return cls + + + if HAVE_PROTOCOLS: + runtime = runtime_checkable + if hasattr(typing, "SupportsIndex"): + SupportsIndex = typing.SupportsIndex + else: + if HAVE_PROTOCOLS: + + @runtime_checkable + class SupportsIndex(Protocol): + __slots__ = () + + @abc.abstractmethod + def __index__(self) -> int: + pass + + + if sys.version_info >= (3, 9, 2): + TypedDict = typing.TypedDict + else: + + def _check_fails(cls, other): + try: + if sys._getframe(1).f_globals["__name__"] not in ('abc', 'functools', + 'typing'): + raise TypeError("TypedDict does not support instance and class checks") + except (AttributeError, ValueError): + pass + + return False + + + def _dict_new(*args, **kwargs): + if not args: + raise TypeError("TypedDict.__new__(): not enough arguments") + _, args = args[0], args[1[:None]] + return dict(*args, **kwargs) + + + _dict_new.__text_signature__ = "($cls, _typename, _fields=None, /, **kwargs)" + + def _typeddict_new(*args, total=True, **kwargs): + if not args: + raise TypeError("TypedDict.__new__(): not enough arguments") + else: + _, args = args[0], args[1[:None]] + if args: + typename, args = args[0], args[1[:None]] + else: + if "_typename" in kwargs: + typename = kwargs.pop("_typename") + import warnings + warnings.warn("Passing '_typename' as keyword argument is deprecated", DeprecationWarning, + 
stacklevel=2) + else: + raise TypeError("TypedDict.__new__() missing 1 required positional argument: '_typename'") + if args: + try: + fields, = args + except ValueError: + raise TypeError("TypedDict.__new__() takes from 2 to 3 positional arguments but {} were given".format(len(args) + 2)) + + else: + if "_fields" in kwargs: + if len(kwargs) == 1: + fields = kwargs.pop("_fields") + import warnings + warnings.warn("Passing '_fields' as keyword argument is deprecated", DeprecationWarning, + stacklevel=2) + else: + fields = None + elif fields is None: + fields = kwargs + else: + if kwargs: + raise TypeError("TypedDict takes either a dict or keyword arguments, but not both") + ns = {"__annotations__": (dict(fields))} + try: + ns["__module__"] = sys._getframe(1).f_globals.get("__name__", "__main__") + except (AttributeError, ValueError): + pass + + return _TypedDictMeta(typename, (), ns, total=total) + + + _typeddict_new.__text_signature__ = "($cls, _typename, _fields=None, /, *, total=True, **kwargs)" + + class _TypedDictMeta(type): + + def __init__(cls, name, bases, ns, total=True): + super(_TypedDictMeta, cls).__init__(name, bases, ns) + + def __new__Parse error at or near `LOAD_DICTCOMP' instruction at offset 80 + + __instancecheck__ = __subclasscheck__ = _check_fails + + + TypedDict = _TypedDictMeta("TypedDict", (dict,), {}) + TypedDict.__module__ = __name__ + TypedDict.__doc__ = "A simple typed name space. At runtime it is equivalent to a plain dict.\n\n TypedDict creates a dictionary type that expects all of its\n instances to have a certain set of keys, with each key\n associated with a value of a consistent type. 
This expectation\n is not checked at runtime but is only enforced by type checkers.\n Usage::\n\n class Point2D(TypedDict):\n x: int\n y: int\n label: str\n\n a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK\n b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check\n\n assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')\n\n The type info can be accessed via the Point2D.__annotations__ dict, and\n the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.\n TypedDict supports two additional equivalent forms::\n\n Point2D = TypedDict('Point2D', x=int, y=int, label=str)\n Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})\n\n The class syntax is only supported in Python 3.6+, while two other\n syntax forms work for Python 2.7 and 3.2+\n " +if hasattr(typing, "Annotated"): + Annotated = typing.Annotated + get_type_hints = typing.get_type_hints + _AnnotatedAlias = typing._AnnotatedAlias +else: + if PEP_560: + + class _AnnotatedAlias(typing._GenericAlias, _root=True): + __doc__ = "Runtime representation of an annotated type.\n\n At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'\n with extra annotations. 
The alias behaves like a normal typing alias,\n instantiating is the same as instantiating the underlying type, binding\n it to types is also the same.\n " + + def __init__(self, origin, metadata): + if isinstance(origin, _AnnotatedAlias): + metadata = origin.__metadata__ + metadata + origin = origin.__origin__ + super().__init__(origin, origin) + self.__metadata__ = metadata + + def copy_with(self, params): + assert len(params) == 1 + new_type = params[0] + return _AnnotatedAlias(new_type, self.__metadata__) + + def __repr__(self): + return "typing_extensions.Annotated[{}, {}]".format(typing._type_repr(self.__origin__), ", ".join((repr(a) for a in self.__metadata__))) + + def __reduce__(self): + return ( + operator.getitem, + ( + Annotated, (self.__origin__,) + self.__metadata__)) + + def __eq__(self, other): + if not isinstance(other, _AnnotatedAlias): + return NotImplemented + if self.__origin__ != other.__origin__: + return False + return self.__metadata__ == other.__metadata__ + + def __hash__(self): + return hash((self.__origin__, self.__metadata__)) + + + class Annotated: + __doc__ = "Add context specific metadata to a type.\n\n Example: Annotated[int, runtime_check.Unsigned] indicates to the\n hypothetical runtime_check module that this type is an unsigned int.\n Every other consumer of this type can ignore this metadata and treat\n this type as int.\n\n The first argument to Annotated must be a valid type (and will be in\n the __origin__ field), the remaining arguments are kept as a tuple in\n the __extra__ field.\n\n Details:\n\n - It's an error to call `Annotated` with less than two arguments.\n - Nested Annotated are flattened::\n\n Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]\n\n - Instantiating an annotated type is equivalent to instantiating the\n underlying type::\n\n Annotated[C, Ann1](5) == C(5)\n\n - Annotated can be used as a generic type alias::\n\n Optimized = Annotated[T, runtime.Optimize()]\n Optimized[int] == 
Annotated[int, runtime.Optimize()]\n\n OptimizedList = Annotated[List[T], runtime.Optimize()]\n OptimizedList[int] == Annotated[List[int], runtime.Optimize()]\n " + __slots__ = () + + def __new__(cls, *args, **kwargs): + raise TypeError("Type Annotated cannot be instantiated.") + + @_tp_cache + def __class_getitem__(cls, params): + if not isinstance(params, tuple) or len(params) < 2: + raise TypeError("Annotated[...] should be used with at least two arguments (a type and an annotation).") + msg = "Annotated[t, ...]: t must be a type." + origin = typing._type_check(params[0], msg) + metadata = tuple(params[1[:None]]) + return _AnnotatedAlias(origin, metadata) + + def __init_subclass__(cls, *args, **kwargs): + raise TypeError("Cannot subclass {}.Annotated".format(cls.__module__)) + + + def _strip_annotations(t): + """Strips the annotations from a given type. + """ + if isinstance(t, _AnnotatedAlias): + return _strip_annotations(t.__origin__) + if isinstance(t, typing._GenericAlias): + stripped_args = tuple((_strip_annotations(a) for a in t.__args__)) + if stripped_args == t.__args__: + return t + res = t.copy_with(stripped_args) + res._special = t._special + return res + return t + + + def get_type_hints(obj, globalns=None, localns=None, include_extras=False): + """Return type hints for an object. + + This is often the same as obj.__annotations__, but it handles + forward references encoded as string literals, adds Optional[t] if a + default value equal to None is set and recursively replaces all + 'Annotated[T, ...]' with 'T' (unless 'include_extras=True'). + + The argument may be a module, class, method, or function. The annotations + are returned as a dictionary. For classes, annotations include also + inherited members. + + TypeError is raised if the argument is not of a type that can contain + annotations, and an empty dictionary is returned if no annotations are + present. 
+ + BEWARE -- the behavior of globalns and localns is counterintuitive + (unless you are familiar with how eval() and exec() work). The + search order is locals first, then globals. + + - If no dict arguments are passed, an attempt is made to use the + globals from obj (or the respective module's globals for classes), + and these are also used as the locals. If the object does not appear + to have globals, an empty dictionary is used. + + - If one dict argument is passed, it is used for both globals and + locals. + + - If two dict arguments are passed, they specify globals and + locals, respectively. + """ + hint = typing.get_type_hints(obj, globalns=globalns, localns=localns) + if include_extras: + return hint + return {k: _strip_annotations(t) for k, t in hint.items()} + + + else: + if HAVE_ANNOTATED: + + def _is_dunder(name): + """Returns True if name is a __dunder_variable_name__.""" + return len(name) > 4 and name.startswith("__") and name.endswith("__") + + + class AnnotatedMeta(typing.GenericMeta): + __doc__ = "Metaclass for Annotated" + + def __new__(cls, name, bases, namespace, **kwargs): + if any((b is not object for b in bases)): + raise TypeError("Cannot subclass " + str(Annotated)) + return (super().__new__)(cls, name, bases, namespace, **kwargs) + + @property + def __metadata__(self): + return self._subs_tree()[2] + + def _tree_repr(self, tree): + cls, origin, metadata = tree + if not isinstance(origin, tuple): + tp_repr = typing._type_repr(origin) + else: + tp_repr = origin[0]._tree_repr(origin) + metadata_reprs = ", ".join((repr(arg) for arg in metadata)) + return "%s[%s, %s]" % (cls, tp_repr, metadata_reprs) + + def _subs_tree(self, tvars=None, args=None): + if self is Annotated: + return Annotated + res = super()._subs_tree(tvars=tvars, args=args) + if isinstance(res[1], tuple): + if res[1][0] is Annotated: + sub_tp = res[1][1] + sub_annot = res[1][2] + return (Annotated, sub_tp, sub_annot + res[2]) + return res + + def _get_cons(self): + 
"""Return the class used to create instance of this type.""" + if self.__origin__ is None: + raise TypeError("Cannot get the underlying type of a non-specialized Annotated type.") + tree = self._subs_tree() + while isinstance(tree, tuple) and tree[0] is Annotated: + tree = tree[1] + + if isinstance(tree, tuple): + return tree[0] + return tree + + @_tp_cache + def __getitem__(self, params): + if not isinstance(params, tuple): + params = ( + params,) + elif self.__origin__ is not None: + return super().__getitem__(params) + if not isinstance(params, tuple) or len(params) < 2: + raise TypeError("Annotated[...] should be instantiated with at least two arguments (a type and an annotation).") + else: + msg = "Annotated[t, ...]: t must be a type." + tp = typing._type_check(params[0], msg) + metadata = tuple(params[1[:None]]) + return self.__class__((self.__name__), + (self.__bases__), + (_no_slots_copy(self.__dict__)), + tvars=(_type_vars((tp,))), + args=( + tp, metadata), + origin=self) + + def __call__(self, *args, **kwargs): + cons = self._get_cons() + result = cons(*args, **kwargs) + try: + result.__orig_class__ = self + except AttributeError: + pass + + return result + + def __getattr__(self, attr): + if self.__origin__ is not None: + if not _is_dunder(attr): + return getattr(self._get_cons(), attr) + raise AttributeError(attr) + + def __setattr__(self, attr, value): + if _is_dunder(attr) or attr.startswith("_abc_"): + super().__setattr__(attr, value) + else: + if self.__origin__ is None: + raise AttributeError(attr) + else: + setattr(self._get_cons(), attr, value) + + def __instancecheck__(self, obj): + raise TypeError("Annotated cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Annotated cannot be used with issubclass().") + + + class Annotated(metaclass=AnnotatedMeta): + __doc__ = "Add context specific metadata to a type.\n\n Example: Annotated[int, runtime_check.Unsigned] indicates to the\n hypothetical runtime_check 
module that this type is an unsigned int.\n Every other consumer of this type can ignore this metadata and treat\n this type as int.\n\n The first argument to Annotated must be a valid type, the remaining\n arguments are kept as a tuple in the __metadata__ field.\n\n Details:\n\n - It's an error to call `Annotated` with less than two arguments.\n - Nested Annotated are flattened::\n\n Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]\n\n - Instantiating an annotated type is equivalent to instantiating the\n underlying type::\n\n Annotated[C, Ann1](5) == C(5)\n\n - Annotated can be used as a generic type alias::\n\n Optimized = Annotated[T, runtime.Optimize()]\n Optimized[int] == Annotated[int, runtime.Optimize()]\n\n OptimizedList = Annotated[List[T], runtime.Optimize()]\n OptimizedList[int] == Annotated[List[int], runtime.Optimize()]\n " + + + if sys.version_info[None[:2]] >= (3, 10): + get_origin = typing.get_origin + get_args = typing.get_args + else: + if PEP_560: + from typing import _GenericAlias + try: + from typing import _BaseGenericAlias + except ImportError: + _BaseGenericAlias = _GenericAlias + + try: + from typing import GenericAlias + except ImportError: + GenericAlias = _GenericAlias + + def get_origin(tp): + """Get the unsubscripted version of a type. + + This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar + and Annotated. Return None for unsupported types. 
Examples:: + + get_origin(Literal[42]) is Literal + get_origin(int) is None + get_origin(ClassVar[int]) is ClassVar + get_origin(Generic) is Generic + get_origin(Generic[T]) is Generic + get_origin(Union[T, int]) is Union + get_origin(List[Tuple[T, T]][int]) == list + get_origin(P.args) is P + """ + if isinstance(tp, _AnnotatedAlias): + return Annotated + if isinstance(tp, (_GenericAlias, GenericAlias, _BaseGenericAlias, + ParamSpecArgs, ParamSpecKwargs)): + return tp.__origin__ + if tp is Generic: + return Generic + + + def get_args(tp): + """Get type arguments with all substitutions performed. + + For unions, basic simplifications used by Union constructor are performed. + Examples:: + get_args(Dict[str, int]) == (str, int) + get_args(int) == () + get_args(Union[int, Union[T, int], str][int]) == (int, str) + get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) + get_args(Callable[[], T][int]) == ([], int) + """ + if isinstance(tp, _AnnotatedAlias): + return ( + tp.__origin__,) + tp.__metadata__ + if isinstance(tp, (_GenericAlias, GenericAlias)): + if getattr(tp, "_special", False): + return () + res = tp.__args__ + if get_origin(tp) is collections.abc.Callable: + if res[0] is not Ellipsis: + res = ( + list(res[None[:-1]]), res[-1]) + return res + return () + + + if hasattr(typing, "TypeAlias"): + TypeAlias = typing.TypeAlias + else: + if sys.version_info[None[:2]] >= (3, 9): + + class _TypeAliasForm(typing._SpecialForm, _root=True): + + def __repr__(self): + return "typing_extensions." + self._name + + + @_TypeAliasForm + def TypeAlias(self, parameters): + """Special marker indicating that an assignment should + be recognized as a proper type alias definition by type + checkers. + + For example:: + + Predicate: TypeAlias = Callable[..., bool] + + It's invalid when used anywhere except as in the example above. 
+ """ + raise TypeError("{} is not subscriptable".format(self)) + + + else: + if sys.version_info[None[:2]] >= (3, 7): + + class _TypeAliasForm(typing._SpecialForm, _root=True): + + def __repr__(self): + return "typing_extensions." + self._name + + + TypeAlias = _TypeAliasForm("TypeAlias", doc="Special marker indicating that an assignment should\n be recognized as a proper type alias definition by type\n checkers.\n\n For example::\n\n Predicate: TypeAlias = Callable[..., bool]\n\n It's invalid when used anywhere except as in the example\n above.") + else: + if hasattr(typing, "_FinalTypingBase"): + + class _TypeAliasMeta(typing.TypingMeta): + __doc__ = "Metaclass for TypeAlias" + + def __repr__(self): + return "typing_extensions.TypeAlias" + + + class _TypeAliasBase(typing._FinalTypingBase, metaclass=_TypeAliasMeta, _root=True): + __doc__ = "Special marker indicating that an assignment should\n be recognized as a proper type alias definition by type\n checkers.\n\n For example::\n\n Predicate: TypeAlias = Callable[..., bool]\n\n It's invalid when used anywhere except as in the example above.\n " + __slots__ = () + + def __instancecheck__(self, obj): + raise TypeError("TypeAlias cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("TypeAlias cannot be used with issubclass().") + + def __repr__(self): + return "typing_extensions.TypeAlias" + + + TypeAlias = _TypeAliasBase(_root=True) + else: + + class _TypeAliasMeta(typing.TypingMeta): + __doc__ = "Metaclass for TypeAlias" + + def __instancecheck__(self, obj): + raise TypeError("TypeAlias cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("TypeAlias cannot be used with issubclass().") + + def __call__(self, *args, **kwargs): + raise TypeError("Cannot instantiate TypeAlias") + + + class TypeAlias(metaclass=_TypeAliasMeta, _root=True): + __doc__ = "Special marker indicating that an assignment should\n be recognized as a proper type alias 
definition by type\n checkers.\n\n For example::\n\n Predicate: TypeAlias = Callable[..., bool]\n\n It's invalid when used anywhere except as in the example above.\n " + __slots__ = () + + + if hasattr(typing, "ParamSpecArgs"): + ParamSpecArgs = typing.ParamSpecArgs + ParamSpecKwargs = typing.ParamSpecKwargs + else: + + class _Immutable: + __doc__ = "Mixin to indicate that object should not be copied." + __slots__ = () + + def __copy__(self): + return self + + def __deepcopy__(self, memo): + return self + + + class ParamSpecArgs(_Immutable): + __doc__ = "The args for a ParamSpec object.\n\n Given a ParamSpec object P, P.args is an instance of ParamSpecArgs.\n\n ParamSpecArgs objects have a reference back to their ParamSpec:\n\n P.args.__origin__ is P\n\n This type is meant for runtime introspection and has no special meaning to\n static type checkers.\n " + + def __init__(self, origin): + self.__origin__ = origin + + def __repr__(self): + return "{}.args".format(self.__origin__.__name__) + + + class ParamSpecKwargs(_Immutable): + __doc__ = "The kwargs for a ParamSpec object.\n\n Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs.\n\n ParamSpecKwargs objects have a reference back to their ParamSpec:\n\n P.kwargs.__origin__ is P\n\n This type is meant for runtime introspection and has no special meaning to\n static type checkers.\n " + + def __init__(self, origin): + self.__origin__ = origin + + def __repr__(self): + return "{}.kwargs".format(self.__origin__.__name__) + + + if hasattr(typing, "ParamSpec"): + ParamSpec = typing.ParamSpec + else: + + class ParamSpec(list): + __doc__ = "Parameter specification variable.\n\n Usage::\n\n P = ParamSpec('P')\n\n Parameter specification variables exist primarily for the benefit of static\n type checkers. They are used to forward the parameter types of one\n callable to another callable, a pattern commonly found in higher order\n functions and decorators. 
They are only valid when used in ``Concatenate``,\n or s the first argument to ``Callable``. In Python 3.10 and higher,\n they are also supported in user-defined Generics at runtime.\n See class Generic for more information on generic types. An\n example for annotating a decorator::\n\n T = TypeVar('T')\n P = ParamSpec('P')\n\n def add_logging(f: Callable[P, T]) -> Callable[P, T]:\n '''A type-safe decorator to add logging to a function.'''\n def inner(*args: P.args, **kwargs: P.kwargs) -> T:\n logging.info(f'{f.__name__} was called')\n return f(*args, **kwargs)\n return inner\n\n @add_logging\n def add_two(x: float, y: float) -> float:\n '''Add two numbers together.'''\n return x + y\n\n Parameter specification variables defined with covariant=True or\n contravariant=True can be used to declare covariant or contravariant\n generic types. These keyword arguments are valid, but their actual semantics\n are yet to be decided. See PEP 612 for details.\n\n Parameter specification variables can be introspected. 
e.g.:\n\n P.__name__ == 'T'\n P.__bound__ == None\n P.__covariant__ == False\n P.__contravariant__ == False\n\n Note that only parameter specification variables defined in global scope can\n be pickled.\n " + + @property + def args(self): + return ParamSpecArgs(self) + + @property + def kwargs(self): + return ParamSpecKwargs(self) + + def __init__(self, name, *, bound=None, covariant=False, contravariant=False): + super().__init__([self]) + self.__name__ = name + self.__covariant__ = bool(covariant) + self.__contravariant__ = bool(contravariant) + if bound: + self.__bound__ = typing._type_check(bound, "Bound must be a type.") + else: + self.__bound__ = None + try: + def_mod = sys._getframe(1).f_globals.get("__name__", "__main__") + except (AttributeError, ValueError): + def_mod = None + + if def_mod != "typing_extensions": + self.__module__ = def_mod + + def __repr__(self): + if self.__covariant__: + prefix = "+" + else: + if self.__contravariant__: + prefix = "-" + else: + prefix = "~" + return prefix + self.__name__ + + def __hash__(self): + return object.__hash__(self) + + def __eq__(self, other): + return self is other + + def __reduce__(self): + return self.__name__ + + def __call__(self, *args, **kwargs): + pass + + + class _ConcatenateGenericAlias(list): + + def __init__(self, origin, args): + super().__init__(args) + self.__origin__ = origin + self.__args__ = args + + def __repr__(self): + _type_repr = typing._type_repr + return "{origin}[{args}]".format(origin=(_type_repr(self.__origin__)), + args=(", ".join((_type_repr(arg) for arg in self.__args__)))) + + def __hash__(self): + return hash((self.__origin__, self.__args__)) + + + @_tp_cache + def _concatenate_getitem(self, parameters): + if parameters == (): + raise TypeError("Cannot take a Concatenate of no types.") + elif not isinstance(parameters, tuple): + parameters = ( + parameters,) + assert isinstance(parameters[-1], ParamSpec), "The last parameter to Concatenate should be a ParamSpec variable." 
+ msg = "Concatenate[arg, ...]: each arg must be a type." + parameters = tuple((typing._type_check(p, msg) for p in parameters)) + return _ConcatenateGenericAlias(self, parameters) + + + if hasattr(typing, "Concatenate"): + Concatenate = typing.Concatenate + _ConcatenateGenericAlias = typing._ConcatenateGenericAlias + else: + if sys.version_info[None[:2]] >= (3, 9): + + @_TypeAliasForm + def Concatenate(self, parameters): + """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a + higher order function which adds, removes or transforms parameters of a + callable. + + For example:: + + Callable[Concatenate[int, P], int] + + See PEP 612 for detailed information. + """ + return _concatenate_getitem(self, parameters) + + + else: + if sys.version_info[None[:2]] >= (3, 7): + + class _ConcatenateForm(typing._SpecialForm, _root=True): + + def __repr__(self): + return "typing_extensions." + self._name + + def __getitem__(self, parameters): + return _concatenate_getitem(self, parameters) + + + Concatenate = _ConcatenateForm("Concatenate", doc="Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a\n higher order function which adds, removes or transforms parameters of a\n callable.\n\n For example::\n\n Callable[Concatenate[int, P], int]\n\n See PEP 612 for detailed information.\n ") + else: + if hasattr(typing, "_FinalTypingBase"): + + class _ConcatenateAliasMeta(typing.TypingMeta): + __doc__ = "Metaclass for Concatenate." 
+ + def __repr__(self): + return "typing_extensions.Concatenate" + + + class _ConcatenateAliasBase(typing._FinalTypingBase, metaclass=_ConcatenateAliasMeta, _root=True): + __doc__ = "Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a\n higher order function which adds, removes or transforms parameters of a\n callable.\n\n For example::\n\n Callable[Concatenate[int, P], int]\n\n See PEP 612 for detailed information.\n " + __slots__ = () + + def __instancecheck__(self, obj): + raise TypeError("Concatenate cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Concatenate cannot be used with issubclass().") + + def __repr__(self): + return "typing_extensions.Concatenate" + + def __getitem__(self, parameters): + return _concatenate_getitem(self, parameters) + + + Concatenate = _ConcatenateAliasBase(_root=True) + else: + + class _ConcatenateAliasMeta(typing.TypingMeta): + __doc__ = "Metaclass for Concatenate." + + def __instancecheck__(self, obj): + raise TypeError("TypeAlias cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("TypeAlias cannot be used with issubclass().") + + def __call__(self, *args, **kwargs): + raise TypeError("Cannot instantiate TypeAlias") + + def __getitem__(self, parameters): + return _concatenate_getitem(self, parameters) + + + class Concatenate(metaclass=_ConcatenateAliasMeta, _root=True): + __doc__ = "Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a\n higher order function which adds, removes or transforms parameters of a\n callable.\n\n For example::\n\n Callable[Concatenate[int, P], int]\n\n See PEP 612 for detailed information.\n " + __slots__ = () + + + if hasattr(typing, "TypeGuard"): + TypeGuard = typing.TypeGuard + else: + if sys.version_info[None[:2]] >= (3, 9): + + class _TypeGuardForm(typing._SpecialForm, _root=True): + + def __repr__(self): + return "typing_extensions." 
+ self._name + + + @_TypeGuardForm + def TypeGuard(self, parameters): + """Special typing form used to annotate the return type of a user-defined + type guard function. ``TypeGuard`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeGuard[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeGuard`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. If the return value is ``True``, the type of its argument + is the type inside ``TypeGuard``. + + For example:: + + def is_str(val: Union[str, float]): + # "isinstance" type guard + if isinstance(val, str): + # Type of ``val`` is narrowed to ``str`` + ... + else: + # Else, type of ``val`` is narrowed to ``float``. + ... + + Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower + form of ``TypeA`` (it can even be a wider form) and this may lead to + type-unsafe results. The main reason is to allow for things like + narrowing ``List[object]`` to ``List[str]`` even though the latter is not + a subtype of the former, since ``List`` is invariant. The responsibility of + writing type-safe type guards is left to the user. + + ``TypeGuard`` also works with type variables. For more information, see + PEP 647 (User-Defined Type Guards). 
+ """ + item = typing._type_check(parameters, "{} accepts only single type.".format(self)) + return _GenericAlias(self, (item,)) + + + else: + if sys.version_info[None[:2]] >= (3, 7): + + class _TypeGuardForm(typing._SpecialForm, _root=True): + + def __repr__(self): + return "typing_extensions." + self._name + + def __getitem__(self, parameters): + item = typing._type_check(parameters, "{} accepts only a single type".format(self._name)) + return _GenericAlias(self, (item,)) + + + TypeGuard = _TypeGuardForm("TypeGuard", + doc='Special typing form used to annotate the return type of a user-defined\n type guard function. ``TypeGuard`` only accepts a single type argument.\n At runtime, functions marked this way should return a boolean.\n\n ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static\n type checkers to determine a more precise type of an expression within a\n program\'s code flow. Usually type narrowing is done by analyzing\n conditional code flow and applying the narrowing to a block of code. The\n conditional expression here is sometimes referred to as a "type guard".\n\n Sometimes it would be convenient to use a user-defined boolean function\n as a type guard. Such a function should use ``TypeGuard[...]`` as its\n return type to alert static type checkers to this intention.\n\n Using ``-> TypeGuard`` tells the static type checker that for a given\n function:\n\n 1. The return value is a boolean.\n 2. If the return value is ``True``, the type of its argument\n is the type inside ``TypeGuard``.\n\n For example::\n\n def is_str(val: Union[str, float]):\n # "isinstance" type guard\n if isinstance(val, str):\n # Type of ``val`` is narrowed to ``str``\n ...\n else:\n # Else, type of ``val`` is narrowed to ``float``.\n ...\n\n Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower\n form of ``TypeA`` (it can even be a wider form) and this may lead to\n type-unsafe results. 
The main reason is to allow for things like\n narrowing ``List[object]`` to ``List[str]`` even though the latter is not\n a subtype of the former, since ``List`` is invariant. The responsibility of\n writing type-safe type guards is left to the user.\n\n ``TypeGuard`` also works with type variables. For more information, see\n PEP 647 (User-Defined Type Guards).\n ') + else: + if hasattr(typing, "_FinalTypingBase"): + + class _TypeGuard(typing._FinalTypingBase, _root=True): + __doc__ = 'Special typing form used to annotate the return type of a user-defined\n type guard function. ``TypeGuard`` only accepts a single type argument.\n At runtime, functions marked this way should return a boolean.\n\n ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static\n type checkers to determine a more precise type of an expression within a\n program\'s code flow. Usually type narrowing is done by analyzing\n conditional code flow and applying the narrowing to a block of code. The\n conditional expression here is sometimes referred to as a "type guard".\n\n Sometimes it would be convenient to use a user-defined boolean function\n as a type guard. Such a function should use ``TypeGuard[...]`` as its\n return type to alert static type checkers to this intention.\n\n Using ``-> TypeGuard`` tells the static type checker that for a given\n function:\n\n 1. The return value is a boolean.\n 2. If the return value is ``True``, the type of its argument\n is the type inside ``TypeGuard``.\n\n For example::\n\n def is_str(val: Union[str, float]):\n # "isinstance" type guard\n if isinstance(val, str):\n # Type of ``val`` is narrowed to ``str``\n ...\n else:\n # Else, type of ``val`` is narrowed to ``float``.\n ...\n\n Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower\n form of ``TypeA`` (it can even be a wider form) and this may lead to\n type-unsafe results. 
The main reason is to allow for things like\n narrowing ``List[object]`` to ``List[str]`` even though the latter is not\n a subtype of the former, since ``List`` is invariant. The responsibility of\n writing type-safe type guards is left to the user.\n\n ``TypeGuard`` also works with type variables. For more information, see\n PEP 647 (User-Defined Type Guards).\n ' + __slots__ = ('__type__', ) + + def __init__(self, tp=None, **kwds): + self.__type__ = tp + + def __getitem__(self, item): + cls = type(self) + if self.__type__ is None: + return cls((typing._type_check(item, "{} accepts only a single type.".format(cls.__name__[1[:None]]))), + _root=True) + raise TypeError("{} cannot be further subscripted".format(cls.__name__[1[:None]])) + + def _eval_type(self, globalns, localns): + new_tp = typing._eval_type(self.__type__, globalns, localns) + if new_tp == self.__type__: + return self + return type(self)(new_tp, _root=True) + + def __repr__(self): + r = super().__repr__() + if self.__type__ is not None: + r += "[{}]".format(typing._type_repr(self.__type__)) + return r + + def __hash__(self): + return hash((type(self).__name__, self.__type__)) + + def __eq__(self, other): + if not isinstance(other, _TypeGuard): + return NotImplemented + if self.__type__ is not None: + return self.__type__ == other.__type__ + return self is other + + + TypeGuard = _TypeGuard(_root=True) + else: + + class _TypeGuardMeta(typing.TypingMeta): + __doc__ = "Metaclass for TypeGuard" + + def __new__(cls, name, bases, namespace, tp=None, _root=False): + self = super().__new__(cls, name, bases, namespace, _root=_root) + if tp is not None: + self.__type__ = tp + return self + + def __instancecheck__(self, obj): + raise TypeError("TypeGuard cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("TypeGuard cannot be used with issubclass().") + + def __getitem__(self, item): + cls = type(self) + if self.__type__ is not None: + raise TypeError("{} cannot be 
further subscripted".format(cls.__name__[1[:None]])) + param = typing._type_check(item, "{} accepts only single type.".format(cls.__name__[1[:None]])) + return cls((self.__name__), (self.__bases__), (dict(self.__dict__)), + tp=param, _root=True) + + def _eval_type(self, globalns, localns): + new_tp = typing._eval_type(self.__type__, globalns, localns) + if new_tp == self.__type__: + return self + return type(self)((self.__name__), (self.__bases__), (dict(self.__dict__)), + tp=(self.__type__), _root=True) + + def __repr__(self): + r = super().__repr__() + if self.__type__ is not None: + r += "[{}]".format(typing._type_repr(self.__type__)) + return r + + def __hash__(self): + return hash((type(self).__name__, self.__type__)) + + def __eq__(self, other): + if not hasattr(other, "__type__"): + return NotImplemented + if self.__type__ is not None: + return self.__type__ == other.__type__ + return self is other + + + class TypeGuard(typing.Final, metaclass=_TypeGuardMeta, _root=True): + __doc__ = 'Special typing form used to annotate the return type of a user-defined\n type guard function. ``TypeGuard`` only accepts a single type argument.\n At runtime, functions marked this way should return a boolean.\n\n ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static\n type checkers to determine a more precise type of an expression within a\n program\'s code flow. Usually type narrowing is done by analyzing\n conditional code flow and applying the narrowing to a block of code. The\n conditional expression here is sometimes referred to as a "type guard".\n\n Sometimes it would be convenient to use a user-defined boolean function\n as a type guard. Such a function should use ``TypeGuard[...]`` as its\n return type to alert static type checkers to this intention.\n\n Using ``-> TypeGuard`` tells the static type checker that for a given\n function:\n\n 1. The return value is a boolean.\n 2. 
If the return value is ``True``, the type of its argument\n is the type inside ``TypeGuard``.\n\n For example::\n\n def is_str(val: Union[str, float]):\n # "isinstance" type guard\n if isinstance(val, str):\n # Type of ``val`` is narrowed to ``str``\n ...\n else:\n # Else, type of ``val`` is narrowed to ``float``.\n ...\n\n Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower\n form of ``TypeA`` (it can even be a wider form) and this may lead to\n type-unsafe results. The main reason is to allow for things like\n narrowing ``List[object]`` to ``List[str]`` even though the latter is not\n a subtype of the former, since ``List`` is invariant. The responsibility of\n writing type-safe type guards is left to the user.\n\n ``TypeGuard`` also works with type variables. For more information, see\n PEP 647 (User-Defined Type Guards).\n ' + __type__ = None \ No newline at end of file diff --git a/APPS_UNCOMPILED/lib/zipp.py b/APPS_UNCOMPILED/lib/zipp.py new file mode 100644 index 0000000..56fadeb --- /dev/null +++ b/APPS_UNCOMPILED/lib/zipp.py @@ -0,0 +1,240 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/zipp.py +# Compiled at: 2024-04-18 03:12:57 +# Size of source mod 2**32: 8404 bytes +import io, posixpath, zipfile, itertools, contextlib, sys, pathlib +if sys.version_info < (3, 7): + from collections import OrderedDict +else: + OrderedDict = dict + +def _parents(path): + """ + Given a path with elements separated by + posixpath.sep, generate all parents of that path. 
+ + >>> list(_parents('b/d')) + ['b'] + >>> list(_parents('/b/d/')) + ['/b'] + >>> list(_parents('b/d/f/')) + ['b/d', 'b'] + >>> list(_parents('b')) + [] + >>> list(_parents('')) + [] + """ + return itertools.islice(_ancestry(path), 1, None) + + +def _ancestry(path): + """ + Given a path with elements separated by + posixpath.sep, generate all elements of that path + + >>> list(_ancestry('b/d')) + ['b/d', 'b'] + >>> list(_ancestry('/b/d/')) + ['/b/d', '/b'] + >>> list(_ancestry('b/d/f/')) + ['b/d/f', 'b/d', 'b'] + >>> list(_ancestry('b')) + ['b'] + >>> list(_ancestry('')) + [] + """ + path = path.rstrip(posixpath.sep) + while path and path != posixpath.sep: + yield path + path, tail = posixpath.split(path) + + +_dedupe = OrderedDict.fromkeys + +def _difference(minuend, subtrahend): + """ + Return items in minuend not in subtrahend, retaining order + with O(1) lookup. + """ + return itertools.filterfalse(set(subtrahend).__contains__, minuend) + + +class CompleteDirs(zipfile.ZipFile): + __doc__ = "\n A ZipFile subclass that ensures that implied directories\n are always included in the namelist.\n " + + @staticmethod + def _implied_dirs(names): + parents = itertools.chain.from_iterable(map(_parents, names)) + as_dirs = (p + posixpath.sep for p in parents) + return _dedupe(_difference(as_dirs, names)) + + def namelist(self): + names = super(CompleteDirs, self).namelist() + return names + list(self._implied_dirs(names)) + + def _name_set(self): + return set(self.namelist()) + + def resolve_dir(self, name): + """ + If the name represents a directory, return that name + as a directory (with the trailing slash). + """ + names = self._name_set() + dirname = name + "/" + dir_match = name not in names and dirname in names + if dir_match: + return dirname + return name + + @classmethod + def make(cls, source): + """ + Given a source (filename or zipfile), return an + appropriate CompleteDirs subclass. 
+ """ + if isinstance(source, CompleteDirs): + return source + else: + return isinstance(source, zipfile.ZipFile) or cls(_pathlib_compat(source)) + if "r" not in source.mode: + cls = CompleteDirs + source.__class__ = cls + return source + + +class FastLookup(CompleteDirs): + __doc__ = "\n ZipFile subclass to ensure implicit\n dirs exist and are resolved rapidly.\n " + + def namelist(self): + with contextlib.suppress(AttributeError): + return self._FastLookup__names + self._FastLookup__names = super(FastLookup, self).namelist() + return self._FastLookup__names + + def _name_set(self): + with contextlib.suppress(AttributeError): + return self._FastLookup__lookup + self._FastLookup__lookup = super(FastLookup, self)._name_set() + return self._FastLookup__lookup + + +def _pathlib_compat(path): + """ + For path-like objects, convert to a filename for compatibility + on Python 3.6.1 and earlier. + """ + try: + return path.__fspath__() + except AttributeError: + return str(path) + + +class Path: + __doc__ = "\n A pathlib-compatible interface for zip files.\n\n Consider a zip file with this structure::\n\n .\n ├── a.txt\n └── b\n ├── c.txt\n └── d\n └── e.txt\n\n >>> data = io.BytesIO()\n >>> zf = zipfile.ZipFile(data, 'w')\n >>> zf.writestr('a.txt', 'content of a')\n >>> zf.writestr('b/c.txt', 'content of c')\n >>> zf.writestr('b/d/e.txt', 'content of e')\n >>> zf.filename = 'mem/abcde.zip'\n\n Path accepts the zipfile object itself or a filename\n\n >>> root = Path(zf)\n\n From there, several path operations are available.\n\n Directory iteration (including the zip file itself):\n\n >>> a, b = root.iterdir()\n >>> a\n Path('mem/abcde.zip', 'a.txt')\n >>> b\n Path('mem/abcde.zip', 'b/')\n\n name property:\n\n >>> b.name\n 'b'\n\n join with divide operator:\n\n >>> c = b / 'c.txt'\n >>> c\n Path('mem/abcde.zip', 'b/c.txt')\n >>> c.name\n 'c.txt'\n\n Read text:\n\n >>> c.read_text()\n 'content of c'\n\n existence:\n\n >>> c.exists()\n True\n >>> (b / 
'missing.txt').exists()\n False\n\n Coercion to string:\n\n >>> import os\n >>> str(c).replace(os.sep, posixpath.sep)\n 'mem/abcde.zip/b/c.txt'\n\n At the root, ``name``, ``filename``, and ``parent``\n resolve to the zipfile. Note these attributes are not\n valid and will raise a ``ValueError`` if the zipfile\n has no filename.\n\n >>> root.name\n 'abcde.zip'\n >>> str(root.filename).replace(os.sep, posixpath.sep)\n 'mem/abcde.zip'\n >>> str(root.parent)\n 'mem'\n " + _Path__repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})" + + def __init__(self, root, at=''): + """ + Construct a Path from a ZipFile or filename. + + Note: When the source is an existing ZipFile object, + its type (__class__) will be mutated to a + specialized type. If the caller wishes to retain the + original type, the caller should either create a + separate ZipFile object or pass a filename. + """ + self.root = FastLookup.make(root) + self.at = at + + def open(self, mode='r', *args, pwd=None, **kwargs): + """ + Open this entry as text or binary following the semantics + of ``pathlib.Path.open()`` by passing arguments through + to io.TextIOWrapper(). 
+ """ + if self.is_dir(): + raise IsADirectoryError(self) + zip_mode = mode[0] + if not self.exists(): + if zip_mode == "r": + raise FileNotFoundError(self) + stream = self.root.open((self.at), zip_mode, pwd=pwd) + if "b" in mode: + if args or kwargs: + raise ValueError("encoding args invalid for binary operation") + return stream + return (io.TextIOWrapper)(stream, *args, **kwargs) + + @property + def name(self): + return pathlib.Path(self.at).name or self.filename.name + + @property + def suffix(self): + return pathlib.Path(self.at).suffix or self.filename.suffix + + @property + def suffixes(self): + return pathlib.Path(self.at).suffixes or self.filename.suffixes + + @property + def stem(self): + return pathlib.Path(self.at).stem or self.filename.stem + + @property + def filename(self): + return pathlib.Path(self.root.filename).joinpath(self.at) + + def read_text(self, *args, **kwargs): + with (self.open)('r', *args, **kwargs) as strm: + return strm.read() + + def read_bytes(self): + with self.open("rb") as strm: + return strm.read() + + def _is_child(self, path): + return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/") + + def _next(self, at): + return self.__class__(self.root, at) + + def is_dir(self): + return not self.at or self.at.endswith("/") + + def is_file(self): + return self.exists() and not self.is_dir() + + def exists(self): + return self.at in self.root._name_set() + + def iterdir(self): + if not self.is_dir(): + raise ValueError("Can't listdir a file") + subs = map(self._next, self.root.namelist()) + return filter(self._is_child, subs) + + def __str__(self): + return posixpath.join(self.root.filename, self.at) + + def __repr__(self): + return self._Path__repr.format(self=self) + + def joinpath(self, *other): + next = (posixpath.join)(self.at, *map(_pathlib_compat, other)) + return self._next(self.root.resolve_dir(next)) + + __truediv__ = joinpath + + @property + def parent(self): + if not self.at: + return self.filename.parent + 
parent_at = posixpath.dirname(self.at.rstrip("/")) + if parent_at: + parent_at += "/" + return self._next(parent_at) diff --git a/APPS_UNCOMPILED/src/__init__.py b/APPS_UNCOMPILED/src/__init__.py new file mode 100644 index 0000000..9f6eebc --- /dev/null +++ b/APPS_UNCOMPILED/src/__init__.py @@ -0,0 +1,7 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/__init__.py +# Compiled at: 2024-04-18 03:12:55 +pass diff --git a/APPS_UNCOMPILED/src/adapter/__init__.py b/APPS_UNCOMPILED/src/adapter/__init__.py new file mode 100644 index 0000000..7137484 --- /dev/null +++ b/APPS_UNCOMPILED/src/adapter/__init__.py @@ -0,0 +1,15 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/adapter/__init__.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 169 bytes +""" +Drivers +Created on 2021/5/31 +@author: Lius +""" +from adapter.opcua_server import OpcuaServer +__all__ = [ + "OpcuaServer"] diff --git a/APPS_UNCOMPILED/src/adapter/opcua_server.py b/APPS_UNCOMPILED/src/adapter/opcua_server.py new file mode 100644 index 0000000..333ae8b --- /dev/null +++ b/APPS_UNCOMPILED/src/adapter/opcua_server.py @@ -0,0 +1,208 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/adapter/opcua_server.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 8918 bytes +""" +Drivers +Created on 2021/5/26 +@author: Lius +""" +import os, sys, json, signal, logging, argparse, libevent, random, string +from opcua 
import ua, Server +from common.Logger import logger +from common.MQClient import MQClientLibevent +import common.InternalTopic as InternalTopic + +class OpcuaServer(object): + + def __init__(self, service_id, config_file): + self.base = libevent.Base() + self.service_id = service_id + self.config_file = config_file + self.opcuaserver = None + self.measure_node_maps = dict() + self.cont_acquisition = 1 + self.mqclient = None + self.cfg_info = dict() + self._OpcuaServer__register_signal() + + def __register_signal(self): + signal.signal(signal.SIGINT, self._OpcuaServer__signal_handler) + signal.signal(signal.SIGTERM, self._OpcuaServer__signal_handler) + + def __signal_handler(self, signalnum, handler): + logger.info("signal. signalnum: %s" % signalnum) + self.base.loopexit(0) + if self.opcuaserver: + self.opcuaserver.stop() + sys.exit() + + def _config_loader(self): + if not os.path.exists(self.config_file): + raise ValueError("File {} does not exist".format(self.config_file)) + logger.info("Loaded config files: %s", self.config_file) + with open((self.config_file), "r", encoding="utf-8-sig") as load_fd: + load_cfg = json.load(load_fd) + return load_cfg + + def dstype_to_uatype(self, dstype): + if dstype == "BIT": + return ua.VariantType.Boolean + if dstype == "SINT": + return ua.VariantType.SByte + if dstype == "BYTE": + return ua.VariantType.Byte + if dstype == "INT": + return ua.VariantType.Int16 + if dstype == "DINT": + return ua.VariantType.Int32 + if dstype == "WORD": + return ua.VariantType.UInt16 + if dstype == "DWORD": + return ua.VariantType.UInt32 + if dstype == "FLOAT": + return ua.VariantType.Float + if dstype == "STRING": + return ua.VariantType.String + if dstype == "BCD": + return ua.VariantType.UInt16 + if dstype == "ULONG": + return ua.VariantType.UInt64 + if dstype == "LONG": + return ua.VariantType.Int64 + if dstype == "DOUBLE": + return ua.VariantType.Double + return ua.VariantType.Null + + def _get_node_id_info(self, nd): + nd_ctrl_name = 
nd["ctrlName"] + nd_name = "2:%s.%s" % (nd_ctrl_name, nd["measureName"]) + if nd_ctrl_name not in self.measure_node_maps: + self.measure_node_maps[nd_ctrl_name] = dict() + var_type = self.dstype_to_uatype(nd["dataType"]) + def_val = ua.uatypes.get_default_value(var_type) + return (nd_name, def_val, var_type) + + def _init_opcua_server(self, opcua_cfg): + self.opcuaserver = Server() + self.opcuaserver._application_uri = "urn:device_supervisor:python:server" + self.opcuaserver.name = "Device Supervisor OpcUa Server" + self.opcuaserver.manufacturer_name = "OpcUa" + self.opcuaserver.set_endpoint("opc.tcp://0.0.0.0:%s" % opcua_cfg["port"]) + objects = self.opcuaserver.get_objects_node() + inobj = objects.add_object("ns=2;s=InObject", "2:InObject") + for nd in opcua_cfg["mapping_table"]: + nd_nm, def_val, var_typ = self._get_node_id_info(nd) + invar = inobj.add_variable(nd["nodeId"], nd_nm, def_val, var_typ) + nd_ctrl_name = nd["ctrlName"] + if nd_ctrl_name not in self.measure_node_maps: + self.measure_node_maps[nd_ctrl_name] = dict() + self.measure_node_maps[nd_ctrl_name][nd["measureName"]] = [ + invar, var_typ] + print(invar) + + root = self.opcuaserver.get_root_node() + print("I got root: ", root) + print("Childs are: ", root.get_children()) + self.opcuaserver.start() + + def _run_opcua_server(self, cfg): + self.cfg_info["version"] = cfg["version"] + self.cfg_info["timestamp"] = cfg["timestamp"] + client_id = "opcuaServer@" + "".join(random.sample(string.ascii_letters + string.digits, 8)) + self._init_mqclient(client_id) + if "misc" in cfg: + self.cont_acquisition = 1 + upper_level = cfg["misc"]["logLvl"].upper() + if upper_level == "DEBUG": + logger.set_level(logging.DEBUG) + else: + if upper_level == "INFO": + logger.set_level(logging.INFO) + else: + if upper_level == "WARN" or upper_level == "WARNING": + logger.set_level(logging.WARN) + else: + if upper_level == "ERROR": + logger.set_level(logging.ERROR) + if "opcuaServer" in cfg: + 
self._init_opcua_server(cfg["opcuaServer"]) + logger.info("Init OPC UA server end.") + self.base.loop() + + def publish_measures_data(self, data): + measures_payload = {"version": (self.cfg_info["version"])} + measures_payload["controllers"] = [ + data] + logger.info("Publish message: %s" % measures_payload) + self.publish_data(InternalTopic.EVENT_BUS_SOUTH_READ.format(driverServiceId=(self.service_id)), measures_payload) + + def publish_data(self, topic, message): + try: + if self.mqclient.is_ready(): + message = message if isinstance(message, (str, bytes)) else json.dumps(message) + return self.mqclient.publish(topic, message, qos=0) + return False + except Exception as e: + try: + logger.error("Publish message error. %s" % e) + finally: + e = None + del e + + def on_drivers_data(self, topic, payload): + payload = json.loads(payload) + if "controllers" not in payload: + return + ctrls = payload["controllers"] + for ctrl in ctrls: + if ctrl["name"] not in self.measure_node_maps: + return + dev_nodes = self.measure_node_maps[ctrl["name"]] + for measure in ctrl["measures"]: + if measure["name"] in dev_nodes: + dev_nodes[measure["name"]][0].set_value(measure["value"], dev_nodes[measure["name"]][1]) + + def _init_mqclient(self, client_id): + if not self.mqclient: + self.mqclient = MQClientLibevent(self.base, client_id) + self.mqclient.init_mqclient() + self.mqclient.add_sub(InternalTopic.EVENT_BUS_SOUTH_READ.format(driverServiceId="+"), self.on_drivers_data) + self.mqclient.add_sub(InternalTopic.EVENT_BUS_SYSTEM_SERVICE_STATUS_PING, self.on_sys_service_status_ping) + self.mqclient.connect() + + def on_sys_service_status_ping(self, topic, payload): + """This function is ping.""" + try: + payload = json.loads(payload) + if "message" in payload: + if payload["message"] == "ping": + resp_topic = InternalTopic.EVENT_BUS_SYSTEM_SERVICE_STATUS_PONG + payload = json.dumps({'message':"pong", 'ServiceId':self.service_id}) + logger.debug("Pong. topic: %s. payload: %s." 
% (resp_topic, payload)) + self.publish_data(resp_topic, payload) + except Exception as e: + try: + logger.error("Pong error. %s" % e) + finally: + e = None + del e + + +def main(argv=sys.argv): + ap = argparse.ArgumentParser(description="Opcua Server") + ap.add_argument("-s", "--service_id", action="store", required=True, help="Service id.") + ap.add_argument("-c", "--config", action="store", required=True, help="Config file path.") + args = ap.parse_args(argv[1:]) + print(args.service_id) + print(args.config) + dev = OpcuaServer(args.service_id, args.config) + cfg = dev._config_loader() + dev._run_opcua_server(cfg) + + +if __name__ == "__main__": + main() diff --git a/APPS_UNCOMPILED/src/common/AlarmPolicy.py b/APPS_UNCOMPILED/src/common/AlarmPolicy.py new file mode 100644 index 0000000..4fa6a4c --- /dev/null +++ b/APPS_UNCOMPILED/src/common/AlarmPolicy.py @@ -0,0 +1,49 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/common/AlarmPolicy.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 1120 bytes +""" +AlarmPolicy +Created on 2020/11/13 +@author: Zhengyb +""" +import copy +AlarmEqual = "eq" +AlarmNotEqual = "neq" +AlarmGreater = "gt" +AlarmLess = "lt" +AlarmGreaterEqual = "egt" +AlarmLessEqual = "elt" +AlarmAnd = "and" +AlarmOr = "or" +AlarmNone = "none" + +class AlarmPolicy(object): + + def __init__(self, name=''): + self.name = name + self.ctrlName = "" + self.measureName = "" + self.alarmLevel = 3 + self.cond1 = {'op':None, + 'value':None} + self.condOp = None + self.cond2 = {'op':None, + 'value':None} + self.content = "" + + def __loads_validate(self, json_obj): + return True + + def load_json_obj(self, json_obj): + if not self._AlarmPolicy__loads_validate(json_obj): + raise ValueError(json_obj) + for key in self.__dict__: + if key in json_obj: + 
self.__dict__[key] = copy.deepcopy(json_obj[key]) + + def dump_json_obj(self): + return copy.deepcopy(self.__dict__) diff --git a/APPS_UNCOMPILED/src/common/CloudType.py b/APPS_UNCOMPILED/src/common/CloudType.py new file mode 100644 index 0000000..102a498 --- /dev/null +++ b/APPS_UNCOMPILED/src/common/CloudType.py @@ -0,0 +1,17 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/common/CloudType.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 272 bytes +StandardMQTT = "Standard MQTT" +InspurYunzhou = "Inspur Yunzhou" +HeilanV1_0 = "HeilanV1.0" +HeilanV2_0 = "HeilanV2.0" +AWSIoT = "AWS IoT" +AzureIoT = "Azure IoT" +AliyunIoT = "Aliyun IoT" +Erlang = "Erlang" +BaiYing = "We" +SparkPlugB = "SparkPlugB MQTT" diff --git a/APPS_UNCOMPILED/src/common/ConfigMisc.py b/APPS_UNCOMPILED/src/common/ConfigMisc.py new file mode 100644 index 0000000..a3ef3dd --- /dev/null +++ b/APPS_UNCOMPILED/src/common/ConfigMisc.py @@ -0,0 +1,70 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/common/ConfigMisc.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 1934 bytes +""" +ConfigMisc +Created on 2020/11/13 +@author: Zhengyb +""" +import copy +ParityChkOdd = "o" +ParityChkEven = "e" +ParityChkNone = "n" + +class ConfigCom(object): + + def __init__(self, name=''): + self.name = name + self.baud = 9600 + self.bits = 8 + self.stopbits = 1 + self.parityChk = ParityChkNone + + def __loads_validate(self, json_obj): + return True + + def load_json_obj(self, json_obj): + if not self._ConfigCom__loads_validate(json_obj): + raise ValueError(json_obj) + for key in self.__dict__: + if key 
in json_obj: + self.__dict__[key] = copy.deepcopy(json_obj[key]) + + def dump_json_obj(self): + return copy.deepcopy(self.__dict__) + + +class ConfigMisc(object): + + def __init__(self, name=''): + self.maxAlarmRecordSz = 2000 + self.continuousAcquisition = 1 + self.coms = list() + + def __loads_validate(self, json_obj): + return True + + def load_json_obj(self, json_obj): + if not self._ConfigMisc__loads_validate(json_obj): + raise ValueError(json_obj) + self.maxAlarmRecordSz = json_obj["maxAlarmRecordSz"] + self.continuousAcquisition = json_obj["continuousAcquisition"] + for com_data in json_obj["coms"]: + com = ConfigCom() + com.load_json_obj(com_data) + self.coms.append(com) + + def dump_json_obj(self): + json_obj = dict() + json_obj["maxAlarmRecordSz"] = self.maxAlarmRecordSz + json_obj["continuousAcquisition"] = self.continuousAcquisition + json_obj["coms"] = list() + for com in self.coms: + com_obj = com.dump_json_obj() + json_obj["coms"].append(com_obj) + + return json_obj diff --git a/APPS_UNCOMPILED/src/common/ConfigParser.py b/APPS_UNCOMPILED/src/common/ConfigParser.py new file mode 100644 index 0000000..c66a38f --- /dev/null +++ b/APPS_UNCOMPILED/src/common/ConfigParser.py @@ -0,0 +1,143 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/common/ConfigParser.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 4295 bytes +import re +from common.DataType import Bool, Bit + +class ConfigParser(object): + + def parse_snap7_addr(self, var): + """ + parse address from: + {"addr": "I5.0"} + to + { + "regType": "I", + "dbnumber": 0, + "regAddr": 5 + "regBit": 0 + + } + :param var: + :return: + """ + addr = var["addr"].split(".") + if re.match(Bool, var["dataType"], re.M | re.I) or re.match(Bit, var["dataType"], re.M | re.I): + if re.match("db", var["addr"], 
re.M | re.I): + var["regType"] = "DB" + var["dbnumber"] = addr[0][2:] + var["regAddr"] = addr[1] + var["regBit"] = addr[2] + else: + var["regType"] = addr[0][0] + var["dbnumber"] = 0 + var["regAddr"] = addr[0][1:] + var["regBit"] = addr[1] + else: + if re.match("db", var["addr"], re.M | re.I): + var["regType"] = "DB" + var["dbnumber"] = addr[0][2:] + var["regAddr"] = addr[1] + else: + var["regType"] = addr[0][0] + var["dbnumber"] = 0 + var["regAddr"] = addr[0][1:] + return var + + def parse_opcua_addr(self, var): + """ + parse address from: + {"addr": "ns=10;i=12"} + to + { + "index": "10", + "idType": "number", + "identifier": 12 + + } + :param var: + :return: + """ + addr = str(var["addr"]) + if ";" in addr: + addr_list = addr.split(";") + var["index"] = addr_list[0].split("ns=")[1] + idt_n_ide = addr_list[1].split("=") + else: + var["index"] = 0 + idt_n_ide = addr.split("=") + if idt_n_ide[0] == "s": + var["idType"] = "string" + var["identifier"] = idt_n_ide[1] + else: + if idt_n_ide[0] == "i": + var["idType"] = "number" + var["identifier"] = int(idt_n_ide[1]) + else: + if idt_n_ide[0] == "G": + var["idType"] = "GUID" + var["identifier"] = idt_n_ide[1] + else: + var["idType"] = "OPAQUE" + var["identifier"] = idt_n_ide[1] + return var + + def parse_modbus_addr(self, var): + """ + parse address from {"addr": "40001.1"} to {"regAddr": 40001, "regBit": 1} + :param var: + :return: var + """ + addr = str(var["addr"]) + if var["dataType"] == "BIT": + if "." in addr: + addr_list = addr.split(".") + var["regAddr"] = addr_list[0] + var["regBit"] = int(addr_list[1]) + else: + var["regAddr"] = addr + if "regAddr" not in var: + var["regAddr"] = var["addr"] + return var + + def parse_omron_addr(self, var): + """ + parse address from {"addr": "D100.2"} to {"regAddr": 40001, "regBit": 1} + :param var: + :return: var + """ + addr = str(var["addr"]) + if var["dataType"] == "BIT": + if "." 
in addr: + addr_list = addr.split(".") + var["regAddr"] = addr_list[0][1:] + var["regType"] = addr_list[0][0] + var["regBit"] = int(addr_list[1]) + else: + var["regAddr"] = addr + else: + var["regAddr"] = addr[1:] + var["regType"] = addr[0] + var["regBit"] = "" + if "regAddr" not in var: + var["regAddr"] = var["addr"] + return var + + def parse_ctl(self, ctl): + ctl_data = dict() + ctl_data["name"] = ctl["name"] + ctl_data["protocol"] = ctl["protocol"] + ctl_data["endpoint"] = ctl["endpoint"] + ctl_data["args"] = ctl["args"] + ctl_data["samplePeriod"] = ctl["samplePeriod"] + ctl_data["expired"] = ctl["expired"] + return ctl_data + + def parse_measure(self, measure): + measure_data = dict() + measure_data["name"] = measure["name"] + return measure_data diff --git a/APPS_UNCOMPILED/src/common/Constant.py b/APPS_UNCOMPILED/src/common/Constant.py new file mode 100644 index 0000000..077c83d --- /dev/null +++ b/APPS_UNCOMPILED/src/common/Constant.py @@ -0,0 +1,106 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/common/Constant.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 8601 bytes +import uuid + +class AppInfo(object): + VENDOR = "InHand" + APP_NAME = "device_supervisor" + MQTT_CLIENT_NAMESPACE = uuid.UUID(int=24197857161011715162171839636988778096) + + +class Const(object): + + class ConstError(TypeError): + pass + + class ConstCaseError(ConstError): + pass + + def __setattr__(self, key, value): + if key in self.__dict__.keys(): + raise self.ConstError("Can't change a const variable: '%s'" % key) + if not key.isupper(): + raise self.ConstCaseError("Const variable must be combined with upper letters:'%s'" % key) + self.__dict__[key] = value + + +class WebRequest(object): + __doc__ = "\n webUI request topics\n " + WEB_REQUEST_PREFIX = "httpreq"
+ WEB_REQUEST_MIDDLE = "/v1/apps/device/supervisor2/" + WEB_REQUEST_SUFFIX = "/#" + WEB_REQUEST = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + WEB_REQUEST_SUFFIX + CTL_ADD = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "controller/post" + WEB_REQUEST_SUFFIX + CTL_GET = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "controller/get" + WEB_REQUEST_SUFFIX + CTL_PUT = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "controller/put" + WEB_REQUEST_SUFFIX + CTL_DEL = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "controller/delete/put" + WEB_REQUEST_SUFFIX + CTL_ST = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "controller/status" + WEB_REQUEST_SUFFIX + MEASURE_ADD = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "measure/post" + WEB_REQUEST_SUFFIX + MEASURE_GET = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "measure/get" + WEB_REQUEST_SUFFIX + MEASURE_PUT = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "measure/put" + WEB_REQUEST_SUFFIX + MEASURE_DEL = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "measure/delete/put" + WEB_REQUEST_SUFFIX + MEASURE_GRP = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "measure/group" + WEB_REQUEST_SUFFIX + MEASURE_READ = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "measure/reading" + WEB_REQUEST_SUFFIX + MEASURE_WRITE = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "measure/writing" + WEB_REQUEST_SUFFIX + GROUP_ADD = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "group/post" + WEB_REQUEST_SUFFIX + GROUP_GET = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "group/get" + WEB_REQUEST_SUFFIX + GROUP_PUT = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "group/put" + WEB_REQUEST_SUFFIX + GROUP_DEL = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "group/delete/put" + WEB_REQUEST_SUFFIX + COM_GET = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "serial/get" + WEB_REQUEST_SUFFIX + COM_PUT = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "serial/put" + WEB_REQUEST_SUFFIX + ALM_ADD = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "alarm/post" + WEB_REQUEST_SUFFIX + ALM_GET = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "alarm/get" + 
WEB_REQUEST_SUFFIX + ALM_PUT = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "alarm/put" + WEB_REQUEST_SUFFIX + ALM_DEL = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "alarm/delete/put" + WEB_REQUEST_SUFFIX + CLD_GET = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "cloud/get" + WEB_REQUEST_SUFFIX + CLD_PUT = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "cloud/put" + WEB_REQUEST_SUFFIX + CLD_ST = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "cloud/status" + WEB_REQUEST_SUFFIX + LAB_ADD = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "label/post" + WEB_REQUEST_SUFFIX + LAB_GET = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "label/get" + WEB_REQUEST_SUFFIX + LAB_PUT = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "label/put" + WEB_REQUEST_SUFFIX + LAB_DEL = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "label/delete/put" + WEB_REQUEST_SUFFIX + PARAM_GET = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "params/get" + WEB_REQUEST_SUFFIX + PARAM_PUT = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "params/put" + WEB_REQUEST_SUFFIX + PUB_ADD = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "uploadfunc/post" + WEB_REQUEST_SUFFIX + PUB_GET = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "uploadfunc/get" + WEB_REQUEST_SUFFIX + PUB_PUT = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "uploadfunc/put" + WEB_REQUEST_SUFFIX + PUB_DEL = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "uploadfunc/delete/put" + WEB_REQUEST_SUFFIX + SUB_ADD = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "downloadfunc/post" + WEB_REQUEST_SUFFIX + SUB_GET = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "downloadfunc/get" + WEB_REQUEST_SUFFIX + SUB_PUT = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "downloadfunc/put" + WEB_REQUEST_SUFFIX + SUB_DEL = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "downloadfunc/delete/put" + WEB_REQUEST_SUFFIX + GEN_ADD = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "genericfunc/post" + WEB_REQUEST_SUFFIX + GEN_GET = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "genericfunc/get" + WEB_REQUEST_SUFFIX + GEN_PUT = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + 
"genericfunc/put" + WEB_REQUEST_SUFFIX + GEN_DEL = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "genericfunc/delete/put" + WEB_REQUEST_SUFFIX + ERLANG_BIND_INFO = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "erlang/bind/info" + WEB_REQUEST_SUFFIX + ERLANG_BIND_GROUP_INFO = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "erlang/bind/group" + WEB_REQUEST_SUFFIX + IMPORT_CONFIG = WEB_REQUEST_PREFIX + "/v1/files/import/" + AppInfo.APP_NAME + WEB_REQUEST_SUFFIX + EXPORT_CONFIG = WEB_REQUEST_PREFIX + "/v1/files/export/" + AppInfo.APP_NAME + WEB_REQUEST_SUFFIX + WEB_RESPONSE_PREFIX = "httprsp" + MODBUS_TCP_SLAVE_GET = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "protocol/server/get" + WEB_REQUEST_SUFFIX + MODBUS_TCP_SLAVE_PUT = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "protocol/server/put" + WEB_REQUEST_SUFFIX + MODBUS_TCP_ADDR_MAP_ADD = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "modbus/mapping/table/post" + WEB_REQUEST_SUFFIX + MODBUS_TCP_ADDR_MAP_GET = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "modbus/mapping/table/get" + WEB_REQUEST_SUFFIX + MODBUS_TCP_ADDR_MAP_PUT = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "modbus/mapping/table/put" + WEB_REQUEST_SUFFIX + MODBUS_TCP_ADDR_MAP_DEL = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "modbus/mapping/table/delete/put" + MODBUS_TCP_ADDR_MAP_DEL = MODBUS_TCP_ADDR_MAP_DEL + WEB_REQUEST_SUFFIX + IEC104_SERVER_GET = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "iec104/server/get" + WEB_REQUEST_SUFFIX + IEC104_SERVER_PUT = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "iec104/server/put" + WEB_REQUEST_SUFFIX + IEC104_SERVER_ADDR_MAP_ADD = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "iec104/mapping/table/post" + WEB_REQUEST_SUFFIX + IEC104_SERVER_ADDR_MAP_GET = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "iec104/mapping/table/get" + WEB_REQUEST_SUFFIX + IEC104_SERVER_ADDR_MAP_PUT = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "iec104/mapping/table/put" + WEB_REQUEST_SUFFIX + IEC104_SERVER_ADDR_MAP_DEL = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + 
"iec104/mapping/table/delete/put" + IEC104_SERVER_ADDR_MAP_DEL = IEC104_SERVER_ADDR_MAP_DEL + WEB_REQUEST_SUFFIX + OPCUA_SERVER_GET = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "opcua/server/get" + WEB_REQUEST_SUFFIX + OPCUA_SERVER_PUT = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "opcua/server/put" + WEB_REQUEST_SUFFIX + OPCUA_SERVER_ADDR_MAP_ADD = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "opcua/mapping/table/post" + WEB_REQUEST_SUFFIX + OPCUA_SERVER_ADDR_MAP_GET = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "opcua/mapping/table/get" + WEB_REQUEST_SUFFIX + OPCUA_SERVER_ADDR_MAP_DEL = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "opcua/mapping/table/delete/put" + OPCUA_SERVER_ADDR_MAP_DEL = OPCUA_SERVER_ADDR_MAP_DEL + WEB_REQUEST_SUFFIX + CLEAR_OFFLINE_DATA_PUT = WEB_REQUEST_PREFIX + WEB_REQUEST_MIDDLE + "empty/offlinedata/put" + WEB_REQUEST_SUFFIX diff --git a/APPS_UNCOMPILED/src/common/Controller.py b/APPS_UNCOMPILED/src/common/Controller.py new file mode 100644 index 0000000..91fbee6 --- /dev/null +++ b/APPS_UNCOMPILED/src/common/Controller.py @@ -0,0 +1,91 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/common/Controller.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 2537 bytes +""" +Controller +Created on 2020/11/13 +@author: Zhengyb +""" +import copy +import common.Protocol as Protocol + +class Controller(object): + + def __init__(self, name="", protocol=Protocol.MbTcp): + self.name = name + self.protocol = protocol + self.endpoint = "" + self.args = dict() + self.samplePeriod = 3 + self.expired = 1000 + self.measure_points = list() + + def add_measure_point(self, point): + point.ctrlName = self.name + self.measure_points.append(point) + + def del_measure_point(self, point): + self.measure_points.remove(point) + + def find_measure_point(self, point_name): 
+ for point in self.measure_points: + if point.name == point_name: + return point + + def __loads_validate(self, json_obj): + return True + + def load_json_obj(self, json_obj): + if not self._Controller__loads_validate(json_obj): + raise ValueError(json_obj) + self.name = json_obj["name"] + self.protocol = json_obj["protocol"] + self.endpoint = json_obj["endpoint"] + self.samplePeriod = json_obj["samplePeriod"] + self.expired = json_obj["expired"] + self.args = json_obj["args"] + + def dump_json_obj(self): + json_obj = dict() + json_obj["name"] = self.name + json_obj["protocol"] = self.protocol + json_obj["endpoint"] = self.endpoint + json_obj["args"] = copy.deepcopy(self.args) + json_obj["samplePeriod"] = self.samplePeriod + json_obj["expired"] = self.expired + return json_obj + + +class ModbusRtuController(Controller): + + def __init__(self, name): + Controller.__init__(self, name, Protocol.MbRtu) + self.samplePeriod = 10 + + +class ModbusTcpController(Controller): + + def __init__(self, name): + Controller.__init__(self, name, Protocol.MbTcp) + + +class OpcUaController(Controller): + + def __init__(self, name): + Controller.__init__(self, name, Protocol.OpcUa) + + +class Snap7Controller(Controller): + + def __init__(self, name): + Controller.__init__(self, name, Protocol.Snap7) + + +class EIPController(Controller): + + def __init__(self, name): + Controller.__init__(self, name, Protocol.Eip) diff --git a/APPS_UNCOMPILED/src/common/DataType.py b/APPS_UNCOMPILED/src/common/DataType.py new file mode 100644 index 0000000..07bba16 --- /dev/null +++ b/APPS_UNCOMPILED/src/common/DataType.py @@ -0,0 +1,85 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/common/DataType.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 2768 bytes +""" +DataType +Created on 
2020/12/01 +@author: Zhengyb +""" +Bool = "BOOL" +Bit = "BIT" +Byte = "BYTE" +Sint = "SINT" +Word = "WORD" +Int = "INT" +Dword = "DWORD" +Dint = "DINT" +Float = "FLOAT" +String = "STRING" +Bcd = "BCD" +Ulong = "ULONG" +Long = "LONG" +Double = "DOUBLE" +Bcd32 = "BCD32" +OPCUA_DATA_TYPE_MAPS = { + 'Boolean': '"BOOL"', + 'SByte': '"SINT"', + 'Byte': '"BYTE"', + 'Int16': '"INT"', + 'UInt16': '"WORD"', + 'Int32': '"DINT"', + 'UInt32': '"DWORD"', + 'Int64': '"STRING"', + 'UInt64': '"STRING"', + 'Float': '"FLOAT"', + 'Double': '"FLOAT"', + 'String': '"STRING"', + 'Null': '"STRING"', + 'DateTime': '"STRING"', + 'Guid': '"STRING"', + 'ByteString': '"STRING"', + 'XmlElement': '"STRING"', + 'NodeId': '"STRING"', + 'ExpandedNodeId': '"STRING"', + 'StatusCode': '"STRING"', + 'QualifiedName': '"STRING"', + 'LocalizedText': '"STRING"', + 'ExtensionObject': '"STRING"', + 'DataValue': '"STRING"', + 'Variant': '"STRING"', + 'DiagnosticInfo': '"STRING"'} +ENIP_DATA_TYPE_MAPS = { + 'BOOL': '"BOOL"', + 'SINT': '"SINT"', + 'INT': '"INT"', + 'DINT': '"DINT"', + 'LINT': '"STRING"', + 'USINT': '"BYTE"', + 'UINT': '"WORD"', + 'UDINT': '"DWORD"', + 'ULINT': '"STRING"', + 'REAL': '"FLOAT"', + 'LREAL': '"FLOAT"', + 'STIME': '"STRING"', + 'DATE': '"STRING"', + 'TIME_OF_DAY': '"STRING"', + 'DATE_AND_TIME': '"STRING"', + 'STRING': '"STRING"', + 'BYTE': '"STRING"', + 'WORD': '"STRING"', + 'DWORD': '"STRING"', + 'LWORD': '"STRING"', + 'STRING2': '"STRING"', + 'FTIME': '"STRING"', + 'LTIME': '"STRING"', + 'ITIME': '"STRING"', + 'STRINGN': '"STRING"', + 'SHORT_STRING': '"STRING"', + 'TIME': '"STRING"', + 'EPATH': '"STRING"', + 'ENGUNIT': '"STRING"', + 'STRINGI': '"STRING"'} diff --git a/APPS_UNCOMPILED/src/common/ErlangAPI.py b/APPS_UNCOMPILED/src/common/ErlangAPI.py new file mode 100644 index 0000000..58fa6cf --- /dev/null +++ b/APPS_UNCOMPILED/src/common/ErlangAPI.py @@ -0,0 +1,122 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 
(default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/common/ErlangAPI.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 5552 bytes +import os, json, time, requests, base64, hashlib +import common.InternalTopic as InternalTopic +from common.ServiceID import ERLANG_MQTT_SERVICE_ID +from common.MQClient import MqttSetting +from common.InternalPath import SLAVE_SERVICE_CONIG_FILE_PATH as CONFIG_PATH +from common.InternalPath import ERLANG_CONNECT_STATUS, ERLANG_THING_NAME_PATH, BAIYING_THING_NAME_PATH +from common.CloudType import Erlang as CloudTypeErlang +from common.CloudType import BaiYing as CloudTypeBaiYing +from common.Logger import logger + +class ErlangAPI(object): + + def __init__(self, client, src_server_id): + """ + :param client: + :param src_server_id: + :return: + """ + self.local_client = client + self.src_service_id = src_server_id + + def publish(self, topic, payload="", task_id=int(time.time()), cache=False): + payload = str(base64.b64encode(payload.encode("utf-8")), "utf-8") + wrap_payload = { + 'topic': topic, + 'payload': payload, + 'task_id': task_id, + 'cache': cache, + 'qos': 1} + data = json.dumps(wrap_payload) + upstram_topic = InternalTopic.EVENT_BUS_REMOTE_UPSTREAM_PUBLISH.format(agentServiceId=ERLANG_MQTT_SERVICE_ID, srcServiceId=(self.src_service_id)) + self.local_client.publish(upstram_topic, data, qos=(MqttSetting.MQTT_QOS_LEVEL)) + logger.debug("Erlang agent PUB: %s, data: %s" % (upstram_topic, data)) + + def subscribe(self, topic, callback=None): + payload = {'topic':topic, + 'qos':1} + data = json.dumps(payload) + sub_topic = InternalTopic.EVENT_BUS_REMOTE_SUBSCRIBES.format(agentServiceId=ERLANG_MQTT_SERVICE_ID, srcServiceId=(self.src_service_id)) + self.local_client.publish(sub_topic, data, qos=(MqttSetting.MQTT_QOS_LEVEL)) + logger.debug("Erlang agent SUB: %s, data: %s" % (sub_topic, data)) + + def unsubscribe(self, topic): + payload = 
{'topic':topic, + 'qos':1} + data = json.dumps(payload) + unsub_topic = InternalTopic.EVENT_BUS_REMOTE_UNSUBSCRIBES.format(agentServiceId=ERLANG_MQTT_SERVICE_ID, srcServiceId=(self.src_service_id)) + self.local_client.publish(unsub_topic, data, qos=(MqttSetting.MQTT_QOS_LEVEL)) + logger.debug("send unsub request topic: %s, payload: %s" % (topic, payload)) + + def unsubscribe_all(self): + unsub_topic = InternalTopic.EVENT_BUS_ERLANG_UNSUBSCRIBES_ALL + self.local_client.publish(unsub_topic, "", qos=(MqttSetting.MQTT_QOS_LEVEL)) + logger.debug("send sub request topic: %s" % (unsub_topic,)) + + def delete(self): + self.unsubscribe_all() + + def upload_southconfig(self, method, url, expired, data=''): + method = method.lower() + if method == "put": + response = requests.put(url, data=(json.dumps(data)), timeout=expired) + else: + response = requests.post(url, data=(json.dumps(data)), timeout=expired) + logger.debug("Upload southconfig response status code:%s" % response.status_code) + + def download_southconfig(self, method, url, expired, filename): + method = method.lower() + if method != "get": + logger.warn("Unsupported Method:%s, default to use GET" % method) + response = requests.get(url, timeout=expired) + southconfig = json.loads(response.content) + logger.debug("Download southconfig response status code:%s" % response.status_code) + if not os.path.exists(CONFIG_PATH): + os.makedirs(CONFIG_PATH) + file_path = CONFIG_PATH + filename + with open(file_path, "w", encoding="utf-8") as fp: + json.dump(southconfig, fp, indent=1, ensure_ascii=False) + return southconfig + + def _generate_md5(self, data=None): + data = str(data) + md5_obj = hashlib.md5() + byte_data = data.encode(encoding="utf-8") + md5_obj.update(byte_data) + str_md5 = md5_obj.hexdigest() + return str_md5 + + def read_erlang_connect_status_file(self): + """ + + :return: True if is connected with Erlang cloud else False + """ + is_connected = False + if os.path.exists(ERLANG_CONNECT_STATUS): + with 
open(ERLANG_CONNECT_STATUS, "r", encoding="utf-8") as f: + json_data = json.load(f) + is_connected = True if json_data["status"] == 2 else False + return is_connected + + def read_erlang_thing_name(self, cloud_type): + if cloud_type == CloudTypeErlang: + THING_NAME_PATH = ERLANG_THING_NAME_PATH + else: + if cloud_type == CloudTypeBaiYing: + THING_NAME_PATH = BAIYING_THING_NAME_PATH + else: + return + if os.path.exists(THING_NAME_PATH): + with open(THING_NAME_PATH, "r", encoding="utf-8") as f: + thing_name = f.read() + return thing_name + else: + logger.warn("not found Erlang ThingName file") + return diff --git a/APPS_UNCOMPILED/src/common/Error.py b/APPS_UNCOMPILED/src/common/Error.py new file mode 100644 index 0000000..edd9826 --- /dev/null +++ b/APPS_UNCOMPILED/src/common/Error.py @@ -0,0 +1,46 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/common/Error.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 2576 bytes + + +class RequestError(object): + ERROR_KEYS = [ + "result", "error_code", "error"] + ERROR_KEYS_WITH_PARAMS = [ + "result", "error_code", "error", "params"] + ERROR_VALUES_DICT = { + -4: ('error', -4, 'Interrupted system call'), + -5: ('error', -5, 'I/O error'), + -1003: ('error', -1003, 'Invalid arguments '), + -11000: ('error', -11000, 'Repeated name'), + -11003: ('error', -11003, 'Only supported one RS232 device'), + -11006: ('error', -11006, 'Repeated slave address for 485 device'), + -11009: ('error', -11009, 'Repeated remote_tsap for the same IP and port'), + -11021: ('error', -11021, 'Repeated settings for the same protocol'), + -11014: ('error', -11014, 'Device does not exist'), + -11025: ('error', -11025, 'File is empty')} + ERROR_VALUES_DICT_WITH_PARAMS = {-11001:('error', -11001, 'Group is used by Vars', ''), + -11002:( + "error", 
class RequestError(object):
    """Registry of API error tuples keyed by numeric error code.

    ERROR_VALUES_DICT maps a code to a fixed (result, error_code, error)
    triple; ERROR_VALUES_DICT_WITH_PARAMS adds a fourth slot carrying
    caller-supplied parameters (format arguments or structured context).
    """

    # Field names corresponding to the tuple slots below.
    ERROR_KEYS = ["result", "error_code", "error"]
    ERROR_KEYS_WITH_PARAMS = ["result", "error_code", "error", "params"]

    ERROR_VALUES_DICT = {
        -4: ("error", -4, "Interrupted system call"),
        -5: ("error", -5, "I/O error"),
        -1003: ("error", -1003, "Invalid arguments "),  # trailing space kept verbatim
        -11000: ("error", -11000, "Repeated name"),
        -11003: ("error", -11003, "Only supported one RS232 device"),
        -11006: ("error", -11006, "Repeated slave address for 485 device"),
        -11009: ("error", -11009, "Repeated remote_tsap for the same IP and port"),
        -11021: ("error", -11021, "Repeated settings for the same protocol"),
        -11014: ("error", -11014, "Device does not exist"),
        -11025: ("error", -11025, "File is empty"),
    }

    ERROR_VALUES_DICT_WITH_PARAMS = {
        -11001: ("error", -11001, "Group is used by Vars", ""),
        -11002: ("error", -11002, "Group is used by Cloud Service", {"group": "", "cloud_type": ""}),
        -11004: ("error", -11004, "At most supported %s devices", ""),
        -11005: ("error", -11005, "Group is used by Warnings", ""),
        -11007: ("error", -11007, "At most supported %s variables for each device", ""),
        -11008: ("error", -11008, "Invalid csv file", ""),
        -11010: ("error", -11010, "At most supported %s groups", ""),
        -11011: ("error", -11011, "At most supported %s alarms", ""),
        -11012: ("error", -11012, "Invalid function_name/script", ""),
        -11013: ("error", -11013, "Group does not exist", ""),
        -11015: ("error", -11015, "Variable does not exist", ""),
        -11016: ("error", -11016, "Device %s is not accessible", ""),
        -11017: ("error", -11017, "The string exceeds the maximum length limit of %d characters", ""),
        -11018: ("error", -11018, "The variable %s does not support write operations.", ""),
        -11019: ("error", -11019, "Invalid value", ""),
        -11020: ("error", -11020, "Unexpected failure", ""),
        -11022: ("error", -11022, "Repeated measure point mapping", ""),
        -11023: ("error", -11023, "Overlapping address mapping", ""),
        -11024: ("error", -11024, "Saving the configuration", ""),
        -11026: ("error", -11026, "AlarmLabel does not exist", ""),
    }
+- Simple time-series table. support fuzzy search. +- Database that supports multi applications access with single writer and several reader. + +TODO List: +- Store database files to flash filesystem periodly or by requestd +- Database recover methods while system failed +- Databases access that support transaction +""" +import os, sys, time, json, pickle, platform, bsddb3 +from .Logger import logger +from functools import wraps +if sys.version > "3": + from configparser import ConfigParser +else: + from ConfigParser import ConfigParser +if "x86" in platform.machine(): + DB_HOME_BASE = "/tmp/dbhome" +else: + DB_HOME_BASE = "/var/user/data/dbhome" +DB_FILE_SURFIX = ".tbl" +DB_META_TBL_SURFIX = ".meta" +DB_TYP_PRIVATE = 0 +DB_TYP_SHARED = 1 +DB_ROL_OWNER = 0 +DB_ROL_WRITER = 1 +DB_ROL_READER = 2 +DB_INIT_STAGE_NONE = 0 +DB_INIT_STAGE_CREATE = 1 +DB_INIT_STAGE_READY = 2 +TBL_TYP_KV = 0 +TBL_TYP_TS = 1 +TS_FUZZY_NONE = 0 +TS_FUZZY_BACKWORD = 1 +TS_FUZZY_FORWORD = 2 +TS_FUZZY_BACK_FOR = 3 +TS_FUZZY_FOR_BACK = 4 +TS_FUZZY_CLOSEST = 5 + +def Second(n=1): + return int(n) + + +def Minute(n=1): + return int(n * Second(60)) + + +def Hour(n=1): + return int(n * Minute(60)) + + +def Day(n=1): + return int(n * Hour(24)) + + +def Week(n=1): + return int(n * Day(7)) + + +if "x86" in platform.machine(): + APP_PATH = "/home/zhengyb/github/DSM" +else: + APP_PATH = "/var/app" +DMCLIENT_PATH = "/var/pycore/bin/DmClient" + +def _parse_db_reg_file(app_name, filename, db_base): + parser = ConfigParser() + parser.read(filename) + for section in parser.sections(): + if section.startswith("db:"): + db_name = section[3[:None]] + if not db_base.has_key(db_name): + db = dict() + db["name"] = db_name + if parser.has_option(section, "type"): + db["type"] = parser.getint(section, "type") + else: + db["type"] = 1 + db["apps"] = list() + db["apps"].append(app_name) + db["tables"] = dict() + db_base[db_name] = db + else: + db = db["tables"][db_name] + db["apps"].append(app_name) + + +class 
class TimeSeriesObject(object):
    """Base value object whose attributes serialize to JSON alongside a
    whole-second timestamp."""

    def __init__(self, timestamp=-1):
        # A negative timestamp (the default) means "stamp with now".
        self.timestamp = int(timestamp) if timestamp >= 0 else int(time.time())

    def json_dumps(self):
        """Serialize every instance attribute as a JSON object string."""
        return json.dumps(self.__dict__)
__getitem__(self, key): + low_key = self._key_format(key) + low_value = bsddb3._DBWithCursor.__getitem__(self, low_key) + return pickle.loads(low_value) + + def __setitem__(self, key, value): + low_key = self._key_format(key) + low_value = pickle.dumps(value) + bsddb3._DBWithCursor.__setitem__(self, low_key, low_value) + + def __delitem__(self, key): + low_key = self._key_format(key) + bsddb3._DBWithCursor.__delitem__(self, low_key) + + @str_to_bytes + def put(self, key, value, autoSync=True): + self[key] = value + if autoSync: + self.sync() + + @str_to_bytes + def get(self, key): + if self.has_key(key): + return self[key] + + @str_to_bytes + def get_item(self, key): + if self.has_key(key): + return (key.decode(), self[key]) + + @str_to_bytes + def delete(self, key): + if self.has_key(key): + del self[key] + self.sync() + return True + return False + + @str_to_bytes + def pop(self, key): + res = None + if self.has_key(key): + res = self[key] + del self[key] + self.sync() + return res + + def keys(self): + low_keys = bsddb3._DBWithCursor.keys(self) + keys = list() + for low_key in low_keys: + res_key = self._key_unformat(low_key) + if sys.version > "3": + try: + keys.append(res_key.decode()) + except Exception as e: + try: + logger.warn("Invalid key: %s, %s" % (res_key, e)) + finally: + e = None + del e + + else: + keys.append(res_key) + + return keys + + def has_key(self, key): + low_key = self._key_format(key) + return bsddb3._DBWithCursor.has_key(self, low_key) + + def free_lock(self): + self._closeCursors() + + def set_location(self, key): + low_key = self._key_format(key) + low_key, low_value = bsddb3._DBWithCursor.set_location(self, low_key) + self._closeCursors() + key = self._key_unformat(low_key) + try: + value = pickle.loads(low_value) + except Exception as e: + try: + value = None + logger.warn("Invalid value. 
key: %s, %s" % (key, e)) + finally: + e = None + del e + + return ( + key, value) + + def next(self): + low_key, low_value = bsddb3._DBWithCursor.next(self) + self._closeCursors() + key = self._key_unformat(low_key) + try: + value = pickle.loads(low_value) + except Exception as e: + try: + value = None + logger.warn("Invalid value. key: %s, %s" % (key, e)) + finally: + e = None + del e + + return ( + key, value) + + def previous(self): + low_key, low_value = bsddb3._DBWithCursor.previous(self) + self._closeCursors() + key = self._key_unformat(low_key) + try: + value = pickle.loads(low_value) + except Exception as e: + try: + value = None + logger.warn("Invalid value. key: %s, %s" % (key, e)) + finally: + e = None + del e + + return ( + key, value) + + def first(self): + low_key, low_value = bsddb3._DBWithCursor.first(self) + self._closeCursors() + key = self._key_unformat(low_key) + try: + value = pickle.loads(low_value) + except Exception as e: + try: + value = None + logger.warn("Invalid value. key: %s, %s" % (key, e)) + finally: + e = None + del e + + return ( + key, value) + + def last(self): + low_key, low_value = bsddb3._DBWithCursor.last(self) + self._closeCursors() + key = self._key_unformat(low_key) + try: + value = pickle.loads(low_value) + except Exception as e: + try: + value = None + logger.warn("Invalid value. 
class TimeSeriesTable(KeyValueTable):
    """Key-value table whose keys are integer timestamps.

    Keys are stored zero-padded to 16 characters so the B-tree's
    lexicographic order matches numeric order. A positive ``timeSpan``
    makes put() prune records older than (newest - timeSpan).

    Fix Me: __iter__() is not support yet.
    """

    def __init__(self, db, timeSpan=Day(1)):
        KeyValueTable.__init__(self, db)
        self.timeSpan = timeSpan

    def _key_format(self, key):
        # Zero-pad so lexicographic order equals numeric order.
        return "{0:0>16}".format(key)

    def _key_unformat(self, low_key):
        return int(low_key)

    def put(self, tsObj, autoSync=True):
        """Insert *tsObj* keyed by its timestamp, then prune expired rows."""
        if not isinstance(tsObj, TimeSeriesObject):
            raise ValueError("The given tsObj is not an instance of TimeSeriesObject")
        self[tsObj.timestamp] = tsObj
        if self.timeSpan > 0:
            cutoff = self.last()[0] - self.timeSpan
            if cutoff > 0:
                # Walk backwards from the cutoff, deleting until the
                # cursor runs off the front of the table.
                self.set_location(cutoff)
                try:
                    while True:
                        stale = self.previous()
                        self.delete(stale[0])
                except Exception:
                    pass
        if autoSync:
            self.sync()

    def _seek_forward(self, timestamp):
        # First record at or after *timestamp*; raises when none exists.
        item = self.set_location(timestamp)
        self._closeCursors()
        return item

    def _seek_backward(self, timestamp):
        # Last record before *timestamp*; when nothing at/after it
        # exists, fall back to the final record of the table.
        try:
            self.set_location(timestamp)
        except Exception:
            item = self.last()
        else:
            item = self.previous()
        self._closeCursors()
        return item

    def get(self, timestamp, fuzzy=TS_FUZZY_FORWORD):
        """Fuzzy-aware lookup; returns the matched (timestamp, value) pair."""
        key, value = self.get_item(timestamp, fuzzy)
        return (key, value)

    def get_item(self, timestamp, fuzzy=TS_FUZZY_FORWORD):
        """Return the (timestamp, value) pair selected by *fuzzy*.

        Bug fix: the decompiled source tested the constant itself
        (``if TS_FUZZY_CLOSEST:``, always truthy), so every unknown
        fuzzy mode was silently handled as CLOSEST and the ValueError
        below was unreachable.
        """
        if fuzzy == TS_FUZZY_NONE:
            return (timestamp, self[timestamp])
        if self.has_key(timestamp):
            # Exact hit short-circuits every fuzzy mode.
            return (timestamp, self[timestamp])
        if fuzzy == TS_FUZZY_FORWORD:
            return self._seek_forward(timestamp)
        if fuzzy == TS_FUZZY_BACKWORD:
            return self._seek_backward(timestamp)
        if fuzzy == TS_FUZZY_BACK_FOR:
            try:
                return self._seek_backward(timestamp)
            except Exception:
                return self._seek_forward(timestamp)
        if fuzzy == TS_FUZZY_FOR_BACK:
            try:
                return self._seek_forward(timestamp)
            except Exception:
                return self._seek_backward(timestamp)
        if fuzzy == TS_FUZZY_CLOSEST:
            try:
                nxt = self.set_location(timestamp)
            except Exception:
                item = self.last()
                self._closeCursors()
                return item
            try:
                prv = self.previous()
            except Exception:
                self._closeCursors()
                return nxt
            self._closeCursors()
            # Ties go to the later (forward) record.
            if nxt[0] - timestamp <= timestamp - prv[0]:
                return nxt
            return prv
        raise ValueError("Unknown fuzzy type")


class _TableRuntimeMeta(object):
    """Per-open-table bookkeeping: raw bsddb handle plus creation params."""

    def __init__(self, name, typ, timeSpan, flags, db, dbc):
        self.name = name          # on-disk table name (with DB_FILE_SURFIX)
        self.typ = typ            # TBL_TYP_KV / TBL_TYP_TS
        self.timeSpan = timeSpan  # retention window for time-series tables
        self.flags = flags        # bsddb open flags used for this table
        self.db = db              # raw bsddb3.db.DB handle
        self.dbc = dbc            # wrapping KeyValueTable/TimeSeriesTable
+ """ + self.rtMeta = None + self.dbName = None + self.homedir = None + self.dbenv = None + self.dbType = dbType + self.dbRole = dbRole + if dbType == DB_TYP_PRIVATE: + dbRole = DB_ROL_OWNER + + def cleanUp(self): + """It is the master's responsibility to call + cleanUp() before the slaves using the database.""" + pass + + def _get_db_flags(self): + flags = bsddb3.db.DB_INIT_MPOOL + flags |= bsddb3.db.DB_THREAD + if self.dbType == DB_TYP_PRIVATE: + flags |= bsddb3.db.DB_PRIVATE + else: + if self.dbType == DB_TYP_SHARED: + flags |= bsddb3.db.DB_INIT_CDB + if self.dbRole == DB_ROL_OWNER: + flags |= bsddb3.db.DB_CREATE + return flags + + def _get_tbl_flags(self): + flags = bsddb3.db.DB_THREAD + if self.dbRole == DB_ROL_OWNER: + flags |= bsddb3.db.DB_CREATE + else: + if self.dbRole == DB_ROL_READER: + flags |= bsddb3.db.DB_RDONLY + return flags + + def _open(self): + if self.dbRole == DB_ROL_OWNER: + cmd = "rm %s/__db.* 2>/dev/null" % self.homedir + os.system(cmd) + dbenv = bsddb3.db.DBEnv() + dbenv.set_cachesize(0, 102400) + dbenv.set_lk_detect(bsddb3.db.DB_LOCK_DEFAULT) + dbenv.open(self.homedir, self._get_db_flags(), 438) + return dbenv + + def open(self, dbName): + self.dbName = dbName + homedir = DB_HOME_BASE + "/" + dbName + self.homedir = homedir + if self.dbRole == DB_ROL_OWNER and not os.path.exists(homedir): + try: + os.makedirs(homedir) + except Exception as e: + try: + print("mkdir error for database homedir. 
%s" % e) + finally: + e = None + del e + + self.dbenv = self._open() + self.rtMeta = dict() + else: + while not os.path.exists(homedir): + time.sleep(1) + + while not self.is_opened(): + try: + self.dbenv = self._open() + except Exception as e: + try: + print(e.__str__()) + time.sleep(1) + finally: + e = None + del e + + self.rtMeta = dict() + + def close(self): + if self.dbenv: + self.dbenv.close() + self.dbenv = None + + def is_opened(self): + return self.dbenv is not None + + def _create_table(self, name, tblTyp=TBL_TYP_KV, timeSpan=Day(1), tbl_class=None): + db = bsddb3.db.DB(self.dbenv) + db.set_flags(0) + db.open(name, bsddb3.db.DB_BTREE, self._get_tbl_flags(), 438) + if tblTyp == TBL_TYP_TS: + new_dbc = TimeSeriesTable(db, timeSpan) + else: + if tblTyp == TBL_TYP_KV: + new_dbc = KeyValueTable(db) + else: + new_dbc = tbl_class(db) + new_db_rt_meta = _TableRuntimeMeta(name, tblTyp, timeSpan, self._get_tbl_flags(), db, new_dbc) + self.rtMeta[name] = new_db_rt_meta + return new_dbc + + def get_table_lenght(self, tblName): + tbl = self.homedir + "/" + tblName + DB_FILE_SURFIX + if os.path.exists(tbl): + return os.path.getsize(tbl) + return 0 + + def get_table(self, tblName, tblType=TBL_TYP_KV, timeSpan=Day(1), tbl_class=None): + """ + Table concept is equal to the DB concept of bsddb3. + The timeSpan argument is only valid in a writer for time-series tables. + The persistent argument is only valid in the owner. + The tbl_class argument is for test need. 
+ """ + if not self.is_opened(): + raise ValueError("InDB() is not opened yet.") + else: + name = tblName + DB_FILE_SURFIX + if name in self.rtMeta: + db_rt_meta = self.rtMeta[name] + try: + if tblType != db_rt_meta.typ: + raise ValueError("There is another table exist with the same name but different type.") + except Exception: + pass + + dbc = db_rt_meta.dbc + if dbc.isOpen(): + return dbc + return self._create_table(name, tblType, timeSpan, tbl_class) + else: + try: + db_rt_meta = self.rtMeta[name] + if tblType != db_rt_meta.typ: + raise ValueError("There is another table exist with the same name but different type.") + except Exception: + pass + + return self._create_table(name, tblType, timeSpan, tbl_class) + + def removeParse error at or near `COME_FROM' instruction at offset 140_0 + + def close_db(self, name): + if not self.is_opened(): + return + if name in self.rtMeta: + self.rtMeta[name].db.close() + + def rename(self, tblName): + name = tblName + DB_FILE_SURFIX + try: + homedir = DB_HOME_BASE + if self.dbName: + homedir = DB_HOME_BASE + "/" + self.dbName + db_table_file = homedir + "/" + name + if os.path.exists(db_table_file): + os.system("cp {0} {1}.{2}.bak".format(db_table_file, db_table_file, str(int(time.time())))) + except Exception: + pass \ No newline at end of file diff --git a/APPS_UNCOMPILED/src/common/InternalPath.py b/APPS_UNCOMPILED/src/common/InternalPath.py new file mode 100644 index 0000000..cc6e69b --- /dev/null +++ b/APPS_UNCOMPILED/src/common/InternalPath.py @@ -0,0 +1,35 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/common/InternalPath.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 1576 bytes +ENDPOINT_CONFILE_PREFIX = "/etc/python/ds2/" +SLAVE_SERVICE_CONIG_FILE_PATH = "/var/run/python/cfg/device_supervisor/" 
# --- temporary export area and CSV file names for config import/export ---
CONFIG_TMP_PATH = "/tmp/files/export"
GROUP_CSV_FILE_NAME = "supervisor_group.csv"
OPCUA_PUB_SUB_GROUP_CSV_FILE_NAME = "opcua_pubsub_group.csv"
MEASURE_CSV_FILE_NAME = "supervisor_measure.csv"
CLOUD_CSV_FILE_NAME = "supervisor_cloud.csv"
CLOUD_MEASURE_CSV_FILE_NAME = "cloud_measure_point.csv"
ALARM_CSV_FILE_NAME = "supervisor_alarm.csv"
SLAVE_CSV_FILE_NAME = "supervisor_modbus_slave.csv"
# --- per-protocol CSV exports ---
IEC104_CSV_FILE_NAME = "supervisor_iec104.csv"
IEC101_CSV_FILE_NAME = "supervisor_iec101.csv"
SL651_CSV_FILE_NAME = "supervisor_sl651.csv"
HJ212_CSV_FILE_NAME = "supervisor_hj212.csv"
BACNET_CSV_FILE_NAME = "supervisor_bacnet.csv"
DNP3_CSV_FILE_NAME = "supervisor_dnp3.csv"
IEC61850_CSV_FILE_NAME = "supervisor_iec61850.csv"
SNMP_CSV_FILE_NAME = "supervisor_snmp.csv"
# (sic) "lables" spellings kept — they name real on-disk files.
LABELS_CSV_FILE_NAME = "supervisor_lables.csv"
ALARMLABLES_CSV_FILE_NAME = "supervisor_alarmLables.csv"
GENFUNS_CSV_FILE_NAME = "supervisor_genfuns.csv"
# --- runtime status/identity files ---
APP_RUNNING_STATUS_PATH = "/var/run/python/ds2"
MQTT_AGENT_STATUS_FILE = APP_RUNNING_STATUS_PATH + "/{cloud_name}_status.json"
ERLANG_THING_NAME_PATH = "/var/backups/erlang/dev_pks/thing_name"
BAIYING_THING_NAME_PATH = "/var/backups/whiteeagle/dev_pks/thing_name"
ERLANG_CONNECT_STATUS = "/var/run/python/ds2/erlang_status.json"
VAR_RUN_PYTHON_DS2_PATH = "/var/run/python/ds2"
EC_SYSTEM_INFO = "/etc/altairiaos/system/system_info.json"
import os


class InternalTopic:
    """Internal MQTT topic templates for the ds2 event bus.

    Placeholders in braces (e.g. ``{groupName}``) are filled by callers
    via ``str.format``; ``+`` segments are MQTT single-level wildcards
    used when subscribing.
    """

    # --- event-bus roots ---
    EVENT_BUS_PREFIX = "ds2/eventbus"
    EVENT_BUS_SOUTH_PREFIX = EVENT_BUS_PREFIX + "/south"
    EVENT_BUS_NORTH_PREFIX = EVENT_BUS_PREFIX + "/north"
    EVENT_BUS_REMOTE_PREFIX = EVENT_BUS_PREFIX + "/remote"
    EVENT_BUS_SYSTEM_PREFIX = EVENT_BUS_PREFIX + "/system"

    # --- system notifications and configuration exchange ---
    EVENT_BUS_SYSTEM_CLOUD_CONN_NOICE = EVENT_BUS_SYSTEM_PREFIX + "/cloud/connection/notice"
    EVENT_BUS_SYSTEM_SERVICE_STATUS_PING = EVENT_BUS_SYSTEM_PREFIX + "/service/status/request"
    EVENT_BUS_SYSTEM_SERVICE_STATUS_PONG = EVENT_BUS_SYSTEM_PREFIX + "/service/status/response"
    EVENT_BUS_SYSTEM_ERLANG_CONN_NOICE = EVENT_BUS_SYSTEM_PREFIX + "/erlang/connection/notice"
    EVENT_BUS_SYSTEM_ALIYUN_UPDATE_CONFIG = EVENT_BUS_SYSTEM_PREFIX + "/aliyun/update/config"
    EVENT_BUS_SYSTEM_GROUP_UPDATE_CONFIG = EVENT_BUS_SYSTEM_PREFIX + "/group/update/config"
    EVENT_BUS_SYSTEM_GET_CONFIG = EVENT_BUS_SYSTEM_PREFIX + "/get/config"
    EVENT_BUS_SYSTEM_UPDATE_CONFIG = EVENT_BUS_SYSTEM_PREFIX + "/update/config"

    # --- southbound (driver-facing) topics ---
    EVENT_BUS_SOUTH_READ = EVENT_BUS_SOUTH_PREFIX + "/read/{driverServiceId}"
    EVENT_BUS_SOUTH_WRITE = EVENT_BUS_SOUTH_PREFIX + "/write/{requestServiceId}"
    EVENT_BUS_SOUTH_WRITE_RESP = EVENT_BUS_SOUTH_PREFIX + "/write/{requestServiceId}/response"
    EVENT_BUS_SOUTH_OPCUA_METHOD = EVENT_BUS_SOUTH_PREFIX + "/opcua/method"

    # --- northbound (cloud-facing) topics ---
    EVENT_BUS_NORTH_MEASURES = EVENT_BUS_NORTH_PREFIX + "/measures/{groupName}"
    EVENT_BUS_NORTH_MEASURES_ONCHANGE = EVENT_BUS_NORTH_PREFIX + "/changed/measures/{groupName}"
    EVENT_BUS_NORTH_ALARM = EVENT_BUS_NORTH_PREFIX + "/alarm/{alarmName}"
    EVENT_BUS_NORTH_WRITE_MEASURES = EVENT_BUS_NORTH_PREFIX + "/write/measures"
    EVENT_BUS_NORTH_RECALL_MEASURES = EVENT_BUS_NORTH_PREFIX + "/recall/measures"
    EVENT_BUS_NORTH_CTRL_STATUS = EVENT_BUS_NORTH_PREFIX + "/controllers/delta"

    # --- remote subscription relay ---
    EVENT_BUS_REMOTE_SUBSCRIBES = EVENT_BUS_REMOTE_PREFIX + "/subscribes/{agentServiceId}/{srcServiceId}"
    EVENT_BUS_REMOTE_UNSUBSCRIBES = EVENT_BUS_REMOTE_PREFIX + "/unsubscribes/{agentServiceId}/{srcServiceId}"
    EVENT_BUS_ERLANG_UNSUBSCRIBES_ALL = EVENT_BUS_REMOTE_PREFIX + "/unsubscribes/all"
    EVENT_BUS_REMOTE_UPSTREAM_PUBLISH = EVENT_BUS_REMOTE_PREFIX + "/upstream/publish/{agentServiceId}/{srcServiceId}"
    EVENT_BUS_REMOTE_DOWNSTREAM_PUBLISH_PREFIX = EVENT_BUS_REMOTE_PREFIX + "/downstream/publish"
    EVENT_BUS_REMOTE_DOWNSTREAM_PUBLISH = EVENT_BUS_REMOTE_PREFIX + "/downstream/publish/{agentServiceId}/{srcServiceId}"

    # --- Erlang ("nezha") south-config exchange ---
    EVENT_BUS_ERLANG_METADATA = "nezha/{thingName}/{cloud}/southconfig/metadata/notify"
    EVENT_BUS_ERLANG_UPLOAD = "nezha/{thingName}/{cloud}/southconfig/upload/response"
    EVENT_BUS_ERLANG_DOWNLOAD = "nezha/{thingName}/{cloud}/southconfig/download/response"
    EVENT_BUS_ERLANG_DEVICE_INFO = "nezha/{thingName}/{cloud}/southconfig/device/info"
    EVENT_BUS_ERLANG_REQUEST_SOUTH_CONFIG = "nezha/{thingName}/methods/request/upload-south-config"
    EVENT_BUS_ERLANG_REQUEST_METADATA = "nezha/{thingName}/{cloud}/southconfig/metadata/get"
    EVENT_BUS_ERLANG_REQUEST_UPLOAD = "nezha/{thingName}/{cloud}/southconfig/upload/request"
    EVENT_BUS_ERLANG_REQUEST_DOWNLOAD = "nezha/{thingName}/{cloud}/southconfig/download/request"
    EVENT_BUS_ERLANG_REQUEST_DEVICE_INFO = "nezha/{thingName}/{cloud}/southconfig/device/info/get"
    EVENT_BUS_ERLANG_RESPONSE_SOUTH_CONFIG = "nezha/{thingName}/methods/response/upload-south-config"

    # --- lightweight TSDB request/response (pid-scoped per process) ---
    LWTSDB_INSERT_REQUEST = "ds2/lwtsdb/insert/req/{table_name}/local-" + str(os.getpid()) + "/{operation_id}"
    LWTSDB_INSERT_RESPONSE = "ds2/lwtsdb/insert/rsp/+/local-" + str(os.getpid()) + "/+"
    LWTSDB_QUERY_REQUEST = "ds2/lwtsdb/query/req/{table_name}/local-" + str(os.getpid()) + "/{operation_id}"
    LWTSDB_QUERY_RESPONSE = "ds2/lwtsdb/query/rsp/+/local-" + str(os.getpid()) + "/+"
    LWTSDB_REMOVE_REQUEST = "ds2/lwtsdb/remove/req/{table_name}/local-" + str(os.getpid()) + "/{operation_id}"
    LWTSDB_REMOVE_RESPONSE = "ds2/lwtsdb/remove/rsp/+/local-" + str(os.getpid()) + "/+"
def get_port(port_file="/var/run/python/mqtt_broker_local.port"):
    """Return the local MQTT broker's TCP port.

    Reads the port number from *port_file* (newly parameterized, with a
    default preserving the original hard-coded path, so the function is
    testable). A missing file, unparsable content, a zero port, or an
    out-of-range value all fall back to the standard MQTT port 1883 —
    exactly as the original's raise-then-swallow logic behaved.
    """
    MQTT_BROKER_PORT = 1883
    if os.path.exists(port_file):
        try:
            with open(port_file, "r") as f:
                port_int = int(f.read())
            # The original treated port 0 as "unset" and rejected
            # anything outside 1..65535.
            if 0 < port_int <= 65535:
                return port_int
        except Exception:
            pass
    return MQTT_BROKER_PORT
debug_format = "%(asctime)s %(levelname)s [%(module)s]: %(message)s"
logfile_format = "%(asctime)s <%(levelname)s> [%(filename)s:%(lineno)d]: %(message)s"
LOGFILE_PREFIX = "/var/log/ds2/"


class Logger(object):
    """Application logger wrapping the stdlib logging module with console,
    rotating-file, and UDP syslog handlers."""

    def __init__(self, name='root'):
        self.name = name
        self.console_handler = None
        self.file_handler = None
        # Bug fix: the decompiled source never initialized syslog_handler,
        # so set_syslog()'s "if self.syslog_handler:" guard raised
        # AttributeError on its very first use.
        self.syslog_handler = None
        self.debug_feature = False
        logging.basicConfig(stream=sys.stdout, format=debug_format)
        self._logger = logging.getLogger(self.name)
        self.level = logging.DEBUG
        self._logger.setLevel(self.level)
        # Bind level methods directly for call-site brevity.
        self.debug = self._logger.debug
        self.info = self._logger.info
        self.error = self._logger.error
        self.exception = self._logger.exception
        self.warn = self._logger.warning
        self.warning = self._logger.warning
        self.filename = None
        self.maxbytes = 1048576
        self.app_name = "unknown"

    def logger_init(self, app_name='app', debug=False):
        """Route DEBUG+ to the in-memory logfile /var/log/ds2/<app>.log,
        INFO+ to UDP syslog, and — only when *debug* — everything to the
        console.
        """
        self.app_name = app_name
        self.set_syslog()
        self.enable_console_log(debug)
        self.set_logfile(app_name + ".log", 2)

    def set_syslog(self, address=('127.0.0.1', 514)):
        """Attach a UDP SysLogHandler once; repeated calls log an error."""
        if self.syslog_handler:
            self.error("The syslog handler can only be set for one time.")
            return
        self.syslog_handler = logging.handlers.SysLogHandler(
            address=address, socktype=socket.SOCK_DGRAM)
        self.syslog_handler.setLevel(logging.INFO)
        syslog_format = "%(asctime)s <%(levelname)s> " + self.app_name + "[%(process)d]: %(message)s"
        self.syslog_handler.setFormatter(logging.Formatter(syslog_format))
        self._logger.addHandler(self.syslog_handler)
maxBytes=(self.maxbytes), backupCount=1) + self.file_handler.setLevel(logging.DEBUG) + formatter = logging.Formatter(logfile_format) + self.file_handler.setFormatter(formatter) + self._logger.addHandler(self.file_handler) + + def enable_console_log(self, enable): + """ + console日志只用于开发调试,该函数应该在set_level()之后调用 + """ + if enable: + if self.console_handler: + return + self.console_handler = logging.StreamHandler() + self.console_handler.setLevel(logging.DEBUG) + formatter = logging.Formatter(debug_format) + self.console_handler.setFormatter(formatter) + self._logger.addHandler(self.console_handler) + else: + if not self.console_handler: + return + self._logger.removeHandler(self.console_handler) + if self.level > logging.DEBUG: + self._logger.setLevel(self.level) + + def set_level(self, level=logging.INFO): + if isinstance(level, str): + if level == "INFO": + level = logging.INFO + else: + if level == "DEBUG": + level = logging.DEBUG + else: + if level == "WARNING": + level = logging.WARNING + else: + if level == "ERROR": + level = logging.ERROR + else: + level = logging.INFO + self.level = level + self._logger.setLevel(level) + if self.file_handler: + self.file_handler.setLevel(level) + + +logger = Logger(name="ds2") diff --git a/APPS_UNCOMPILED/src/common/MQClient.py b/APPS_UNCOMPILED/src/common/MQClient.py new file mode 100644 index 0000000..60ec37f --- /dev/null +++ b/APPS_UNCOMPILED/src/common/MQClient.py @@ -0,0 +1,524 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/common/MQClient.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 19241 bytes +""" +MQClient +Created on 2017/12/26 +@author: Zhengyb +""" +import ssl, time +import paho.mqtt.client as mqtt +from socket import gaierror +from .Logger import logger +import socket, libevent +from .LocalSetting 
import get_port +MQ_NOT_READY = 0 +MQ_READY = 1 + +class MqttSetting(object): + MQTT_QOS_LEVEL = 0 + + +class LocalBrokerBadConfError(ValueError): + pass + + +class LocalPublishError(ValueError): + pass + + +class MQClient(object): + + def __init__(self, client_id, broker_host="127.0.0.1", broker_port=get_port(), username=None, passwd=None, keepalive=60, tls=False, capath=None, max_queue_size=1024, clean_session=None, userdata=None, protocol=mqtt.MQTTv311, after_connect=None, on_connected=None, on_disconnected=None): + self.client_id = client_id + self.broker_host = broker_host + self.broker_port = broker_port + self.keepalive = keepalive + self.username = username + self.passwd = passwd + self.tls = tls + self.capath = capath + self.clean_session = clean_session + self.userdata = userdata + self.protocol = protocol + self.mqtt_client = mqtt.Client((self.client_id), clean_session=(self.clean_session), userdata=(self.userdata), + protocol=(self.protocol)) + self.mqtt_client.on_connect = self._on_connect + self.mqtt_client.on_disconnect = self._on_disconnect + self.mqtt_client.on_publish = self._on_publish + self.mqtt_client.on_message = self._on_message + self.max_queue_size = max_queue_size + self.mqtt_client.max_queued_messages_set(self.max_queue_size) + self.mqtt_client.max_inflight_messages_set(1) + self._on_connected = on_connected + self._on_disconnected = on_disconnected + self._after_connect = after_connect + self._state = MQ_NOT_READY + self.subs = dict() + self.pub_acks = dict() + self.pub_topic_cbs = dict() + + def connect(self): + """This function should be called once after MQClient object is created""" + try: + if self.username: + if self.passwd: + self.mqtt_client.username_pw_set(self.username, self.passwd) + if self.tls is True: + if self.capath: + self.mqtt_client.tls_set(ca_certs=(self.capath), tls_version=(ssl.PROTOCOL_TLSv1_2)) + ret = self.mqtt_client.connect(self.broker_host, self.broker_port, self.keepalive) + if self._after_connect is not 
None: + self._after_connect(self.mqtt_client) + return ret + except Exception as e: + try: + logger.error("connect. %s" % e.__str__()) + finally: + e = None + del e + + def reconnect(self): + try: + self.mqtt_client.reconnect() + if self._after_connect is not None: + self._after_connect(self.mqtt_client) + except gaierror as e: + try: + time.sleep(5) + raise e + finally: + e = None + del e + + except ValueError as e: + try: + logger.warn("reconnect -> connect. %s" % e.__str__()) + self.connect() + finally: + e = None + del e + + except socket.error as se: + try: + logger.warn("reconnect -> connect. %s" % se.__str__()) + self.mqtt_client = mqtt.Client((self.client_id), clean_session=(self.clean_session), userdata=(self.userdata), + protocol=(self.protocol)) + self.mqtt_client.on_connect = self._on_connect + self.mqtt_client.on_disconnect = self._on_disconnect + self.mqtt_client.on_publish = self._on_publish + self.mqtt_client.on_message = self._on_message + self.mqtt_client.max_queued_messages_set(self.max_queue_size) + self.mqtt_client.max_inflight_messages_set(1) + self.connect() + finally: + se = None + del se + + except Exception as e: + try: + logger.error("reconnect. 
%s (host: %s, port: %s)" % (e.__str__(), self.broker_host, self.broker_port)) + finally: + e = None + del e + + def disconnect(self): + return self.mqtt_client.disconnect() + + def loop(self): + """This function could be called in a while True loop""" + try: + return self.mqtt_client.loop() + except socket.error as err: + try: + logger.error("loop error %s" % err.__str__()) + self._on_disconnect(self.mqtt_client, None, mqtt.MQTT_ERR_NO_CONN) + finally: + err = None + del err + + return 404 + + def loop_misc(self): + """This function could be called every some seconds to handle + retry and ping""" + try: + self.mqtt_client.loop_misc() + except socket.error as err: + try: + logger.error("loop error %s" % err.__str__()) + self._on_disconnect(self.mqtt_client, None, mqtt.MQTT_ERR_NO_CONN) + finally: + err = None + del err + + def loop_read(self): + """This function could be called while the read IO is valid""" + self.mqtt_client.loop_read() + + def loop_write(self): + """This function could be called while the write IO is valid""" + if self.mqtt_client.want_write(): + self.mqtt_client.loop_write() + + def socket(self): + return self.mqtt_client.socket() + + def get_state(self): + return self._state + + def is_ready(self): + return self._state == MQ_READY + + def add_sub(self, topic, callback, qos=MqttSetting.MQTT_QOS_LEVEL): + """ + This function should be call after MQClient object is created + callback is a function(payload) + """ + d = dict() + d["callback"] = callback + d["qos"] = qos + self.subs[topic] = d + if self._state == MQ_READY: + qos = self.subs[topic]["qos"] + logger.debug("key %s, value %s" % (topic, qos)) + self.mqtt_client.subscribe(topic, qos) + + def del_sub(self, topic): + if topic in self.subs: + del self.subs[topic] + if self._state == MQ_READY: + self.mqtt_client.unsubscribe(topic) + + def set_on_log(self, on_log): + self.mqtt_client.on_log = on_log + + def add_pub_topic_callback(self, topic, on_publish_callback): + self.pub_topic_cbs[topic] = 
on_publish_callback + + def del_pub_topic_callback(self, topic): + if topic in self.pub_topic_cbs.keys(): + del self.pub_topic_cbs[topic] + + def publish(self, topic, payload, qos=MqttSetting.MQTT_QOS_LEVEL, userdata=None): + if self.get_state() == MQ_READY: + try: + mqttc_msg_info = self.mqtt_client.publish(topic, payload, qos) + if mqttc_msg_info.rc is mqtt.MQTT_ERR_QUEUE_SIZE: + logger.info("publish return warning: %d(%s)" % ( + mqttc_msg_info.rc, "local queue overflow")) + for _ in range(0, mqtt.MQTT_ERR_QUEUE_SIZE): + if self.loop() > 0: + raise socket.error("Unknown\xa0exception...") + + else: + if mqttc_msg_info.rc is not mqtt.MQTT_ERR_SUCCESS and mqttc_msg_info.rc is not mqtt.MQTT_ERR_NO_CONN: + logger.error("publish() return error: %d(%s)" % ( + mqttc_msg_info.rc, + mqtt.error_string(mqttc_msg_info.rc))) + raise LocalPublishError("Local publish error, %s" % mqtt.error_string(mqttc_msg_info.rc)) + else: + if qos > 0: + if userdata is not None: + if topic in self.pub_topic_cbs: + d = dict() + d["topic"] = topic + d["userdata"] = userdata + self.pub_acks[mqttc_msg_info.mid] = d + return True + except socket.error as err: + try: + logger.error("publish() exception: %s topic %s payload %s" % ( + err.__str__(), topic, payload)) + self._on_disconnect(self.mqtt_client, None, mqtt.MQTT_ERR_NO_CONN) + finally: + err = None + del err + + except Exception as e: + try: + logger.error("publish() exception: %s topic %s payload %s" % ( + e.__str__(), topic, payload)) + finally: + e = None + del e + + return False + + def _subscribe_topics(self): + for topic in self.subs.keys(): + qos = self.subs[topic]["qos"] + logger.debug("key %s, value %s" % (topic, qos)) + self.mqtt_client.subscribe(topic, qos) + + def _on_connect(self, client, userdata, flags, rc): + logger.info("_on_connect: rc = %d." 
% rc) + if rc == mqtt.CONNACK_ACCEPTED: + self._state = MQ_READY + self._subscribe_topics() + if self._on_connected is not None: + self._on_connected(client) + elif rc == mqtt.CONNACK_REFUSED_SERVER_UNAVAILABLE: + if self._state == MQ_READY: + if self._on_disconnected is not None: + self._on_disconnected(client) + self.reconnect() + elif self._state == MQ_READY: + if self._on_disconnected is not None: + self._on_disconnected(client) + raise LocalBrokerBadConfError(mqtt.connack_string(rc)) + + def _on_disconnect(self, client, userdata, rc): + logger.debug("_on_disconnect: rc = %d." % (rc,)) + if rc == mqtt.MQTT_ERR_SUCCESS: + logger.warn("disconnected on disconnect() call") + self._state = MQ_NOT_READY + if self._on_disconnected is not None: + self._on_disconnected(client) + else: + self._state = MQ_NOT_READY + if self._on_disconnected is not None: + self._on_disconnected(client) + self.reconnect() + + def _on_subscribe(self, client, userdata, mid, granted_qos): + pass + + def _on_publish(self, client, userdata, mid): + if mid in self.pub_acks.keys(): + topic = self.pub_acks[mid]["topic"] + if topic is not None: + if topic in self.pub_topic_cbs: + callback = self.pub_topic_cbs[topic] + if callback is not None: + callback(self.pub_acks[mid]["topic"], self.pub_acks[mid]["userdata"]) + del self.pub_acks[mid] + + def _on_message(self, client, userdata, msg): + found = False + for sub in self.subs.keys(): + if mqtt.topic_matches_sub(sub, msg.topic): + found = True + callback = self.subs[sub]["callback"] + if callback is not None: + try: + callback(msg.topic, msg.payload) + except Exception as e: + try: + logger.warn("%s" % e.__str__()) + finally: + e = None + del e + + break + + if not found: + pass + + +class MQClientLibevent(object): + + def __init__(self, base, client_id): + self.base = base + self.mqclient = None + self.readEvt = None + self.writeEvt = None + self.client_id = client_id + self.target_host = "127.0.0.1" + self.target_port = get_port() + 
self.target_username = None + self.target_passwd = None + self.target_tls = None + self.target_capath = None + self.keepalive = 240 + self.clean_session = None + self.max_queue_size = 1024 + self.protocol = mqtt.MQTTv311 + self.enable_log = False + self.timer = 1 + self.mq_timer = libevent.Timer((self.base), + (self._mq_timer_handler), userdata=None) + + def set_mq_info(self, client_id, target_host="127.0.0.1", target_port=get_port(), target_username=None, target_passwd=None, keepalive=60, clean_session=None, target_tls=None, target_capath=None, protocol=mqtt.MQTTv311, max_queue_size=1024): + self.target_host = target_host + self.target_port = target_port + self.client_id = client_id + self.target_username = target_username + self.target_passwd = target_passwd + self.target_tls = target_tls + self.target_capath = target_capath + self.keepalive = keepalive + self.clean_session = clean_session + self.max_queue_size = max_queue_size + self.protocol = protocol + + def init_mqclient(self): + self.mqclient = MQClient((self.client_id), + broker_host=(self.target_host), + broker_port=(self.target_port), + username=(self.target_username), + passwd=(self.target_passwd), + keepalive=(self.keepalive), + tls=(self.target_tls), + capath=(self.target_capath), + max_queue_size=(self.max_queue_size), + clean_session=(self.clean_session), + protocol=(self.protocol), + after_connect=(self._after_connect), + on_connected=(self._on_connected), + on_disconnected=(self._on_disconnected)) + + def linkCheckout(self): + try: + s = socket.socket() + s.settimeout(1) + status = s.connect_ex((self.target_host, self.target_port)) + s.close() + if status == 0: + logger.debug("Network Connection OK.") + return True + except Exception as e: + try: + logger.debug("Link checkout except: %s" % e) + finally: + e = None + del e + + logger.warn("Network connection failed. 
host: %s, port: %s" % (self.target_host, self.target_port)) + return False + + def loop(self): + self.mqclient.loop() + + def connect(self): + if self.enable_log: + self.mqclient.set_on_log(self.on_log) + if self.linkCheckout(): + self.mqclient.connect() + self.mq_timer.add(self.timer) + + def disconnect(self): + self.mq_timer.delete() + self.mqclient.disconnect() + + def add_sub(self, topic, callback, qos=MqttSetting.MQTT_QOS_LEVEL): + self.mqclient.add_sub(topic, callback, qos) + + def del_sub(self, topic): + self.mqclient.del_sub(topic) + + def add_publish_callback(self, topic, on_publish_callback): + self.mqclient.add_pub_topic_callback(topic, on_publish_callback) + + def del_publish_callback(self, topic): + self.mqclient.del_pub_topic_callback(topic) + + def publish(self, topic, payload, qos=MqttSetting.MQTT_QOS_LEVEL, userdata=None): + res = self.mqclient.publish(topic, payload, qos, userdata=userdata) + if res: + self.writeEvt.add() + return res + + def is_ready(self): + return self.mqclient.is_ready() + + def _mq_timer_handler(self, evt, userdata): + timestamp = time.time() + try: + if self.readEvt is not None: + self.mqclient.loop_misc() + self.mqclient.loop_write() + else: + if self.linkCheckout(): + self.mqclient.reconnect() + else: + self.timer = 15 + except Exception as e: + try: + logger.error("%s" % e.__str__()) + finally: + e = None + del e + + use_time = time.time() - timestamp + self.mq_timer.add(abs(use_time) + self.timer) + + def _mq_do_read(self, evt, fd, what, userdata): + self.mqclient.loop_read() + if self.readEvt is not None: + self.readEvt.add() + + def _mq_do_write(self, evt, fd, what, userdata): + self.mqclient.loop_write() + + def _after_connect(self, client): + if self.mqclient.socket() is not None: + if self.readEvt is not None: + self.readEvt.delete() + if self.writeEvt is not None: + self.writeEvt.delete() + self.readEvt = libevent.Event(self.base, client.socket().fileno(), libevent.EV_READ | libevent.EV_PERSIST, self._mq_do_read) + 
self.writeEvt = libevent.Event(self.base, client.socket().fileno(), libevent.EV_WRITE, self._mq_do_write) + logger.debug("add MQ read & write event") + self.readEvt.add() + self.writeEvt.add() + self.timer = 1 + + def _on_connected(self, client): + pass + + def _on_disconnected(self, client): + logger.warn("delete MQ read & write event") + self.readEvt.delete() + self.writeEvt.delete() + self.readEvt = None + self.writeEvt = None + + def on_log(self, client, userdata, level, buf): + logger.info("%s -> %s" % (self.client_id, buf)) + + +def test_mq(): + + class App(object): + + def __init__(self, vendor_name, app_name): + self.base = libevent.Base() + self.libeventmq = MQClientLibevent(self.base, vendor_name) + self.pub_timer = libevent.Timer((self.base), + (self._pub_timer_handler), userdata=None) + self.pub_topic = "local/py/test" + self.pub_id = 0 + + def _on_test_sub(self, topic, payload): + print("received test msg: %s" % payload) + + def _on_pub_topic_ack_handler(self, topic, userdata): + print("Msg(topic %s, userdata %s)is sent" % (topic, userdata)) + + def _pub_timer_handler(self, evt, userdata): + if self.libeventmq.is_ready(): + self.libeventmq.publish(self.pub_topic, "hello AppTemplate, %d" % self.pub_id, 1, self.pub_id) + self.pub_id = self.pub_id + 1 + self.pub_timer.add(3) + + def on_test_sub(self, topic, payload): + print("received test msg: %s" % payload) + + def run(self): + self.pub_timer.add(5) + self.base.loop() + + app = App("vendor123", "app1") + app.libeventmq.init_mqclient() + app.libeventmq.add_sub("local/py/test", app.on_test_sub) + app.libeventmq.connect() + app.run() + + +if __name__ == "__main__": + test_mq() diff --git a/APPS_UNCOMPILED/src/common/MeasureGroup.py b/APPS_UNCOMPILED/src/common/MeasureGroup.py new file mode 100644 index 0000000..72a5d0f --- /dev/null +++ b/APPS_UNCOMPILED/src/common/MeasureGroup.py @@ -0,0 +1,33 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 
(default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/common/MeasureGroup.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 733 bytes +""" +Controller +Created on 2020/11/13 +@author: Zhengyb +""" + +class MeasureGroup(object): + + def __init__(self, name=''): + self.name = name + self.uploadInterval = 10 + + def __loads_validate(self, json_obj): + return True + + def load_json_obj(self, json_obj): + if not self._MeasureGroup__loads_validate(json_obj): + raise ValueError(json_obj) + self.name = json_obj["name"] + self.uploadInterval = json_obj["uploadInterval"] + + def dump_json_obj(self): + json_obj = dict() + json_obj["name"] = self.name + json_obj["uploadInterval"] = self.uploadInterval + return json_obj diff --git a/APPS_UNCOMPILED/src/common/MeasurePoint.py b/APPS_UNCOMPILED/src/common/MeasurePoint.py new file mode 100644 index 0000000..38c2a67 --- /dev/null +++ b/APPS_UNCOMPILED/src/common/MeasurePoint.py @@ -0,0 +1,52 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/common/MeasurePoint.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 1256 bytes +""" +MeasurePoint +Created on 2020/11/13 +@author: Zhengyb +""" +import copy +import common.SendType as SendType +import common.DataType as DataType +import common.Permission as Permission + +class MeasurePoint(object): + + def __init__(self, name=''): + self.name = name + self.ctrlName = "" + self.groupName = "" + self.sendType = SendType.Periodic + self.dataType = DataType.Word + self.addr = "" + self.decimal = 2 + self.len = 0 + self.read_write = Permission.RO + self.unit = "" + self.desc = "" + self.transform = 0 + self.maxValue = 0 + self.minValue = 0 + self.maxScale = 0 + self.minScale = 0 + self.gain = 0 + 
self.offset = 0 + + def dump_json_obj(self): + d = copy.deepcopy(self.__dict__) + return d + + def load_json_obj(self, json_obj): + if not self._MeasurePoint__loads_validate(json_obj): + raise ValueError(json_obj) + for key in self.__dict__: + if key in json_obj: + self.__dict__[key] = json_obj[key] + + def __loads_validate(self, dmp): + return True diff --git a/APPS_UNCOMPILED/src/common/MobiusAPI.py b/APPS_UNCOMPILED/src/common/MobiusAPI.py new file mode 100644 index 0000000..2d6b6b7 --- /dev/null +++ b/APPS_UNCOMPILED/src/common/MobiusAPI.py @@ -0,0 +1,121 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/common/MobiusAPI.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 4721 bytes +import os, json +from .Logger import logger +from .Constant import AppInfo +from .InternalPath import EC_SYSTEM_INFO +if not os.path.exists(EC_SYSTEM_INFO): + from mobiuspi_lib.common.inbase import BaseRestful +else: + + class MobiusPi(object): + + def __init__(self): + if os.path.exists(EC_SYSTEM_INFO): + self.app_base_path = "/var/user" + self.app_run_base_path = "/var/run/python" + self.base_cfg_path = self.app_base_path + "/cfg/device_supervisor" + if not os.path.exists(self.base_cfg_path): + os.makedirs((self.base_cfg_path), 493, exist_ok=True) + self.app_cfg_path = self.base_cfg_path + "/device_supervisor.cfg" + self.app_path = self.app_base_path + "/app/device_supervisor" + try: + with open(EC_SYSTEM_INFO, "r", encoding="utf-8") as fp: + sys_info = json.load(fp) + self.product_number = sys_info["device_info"]["model_info"]["pn"] + self.serial_number = sys_info["device_info"]["model_info"]["sn"] + self.model_name = sys_info["device_info"]["model_info"]["model"] + self.product_mac = sys_info["device_info"]["hardware_info"]["interface"]["eth"][0]["iface_mac"] + except 
Exception as error: + try: + logger.info("Parse system_info.json failed(%s)" % error) + self.product_number = "" + self.serial_number = "" + self.model_name = "" + self.product_mac = "" + finally: + error = None + del error + + else: + from mobiuspi_lib.systeminfo import SystemInfo + from mobiuspi_lib.config import Config as AppConfig + self.app_config = AppConfig(app_name=(AppInfo.APP_NAME)) + self.app_base_path = self.app_config.app_base_path + self.app_run_base_path = self.app_config.app_run_base_path + self.base_cfg_path = self.app_config.get_app_cfg_path() + if not os.path.exists(self.base_cfg_path): + os.makedirs((self.base_cfg_path), 493, exist_ok=True) + self.app_cfg_path = self.app_config.get_app_cfg_file() + self.app_run_path = self.app_config.get_app_run_path() + self.app_path = self.app_config.get_app_path() + self.sys_info = SystemInfo() + self.product_number = self.sys_info.get_product_number() + self.serial_number = self.sys_info.get_serial_number() + self.product_mac = self.sys_info.get_mac_addr1() + self.model_name = self.sys_info.get_model_name() + self.cfg_path = self.base_cfg_path + "/" + AppInfo.APP_NAME + ".cfg" + + + if not os.path.exists(EC_SYSTEM_INFO): + + class ConfigAPI(BaseRestful): + + def __init__(self, ip='', port=''): + if ip and port: + BaseRestful.__init__(self, ip=ip, port=port) + else: + BaseRestful.__init__(self) + self._settime_url = self.url_base + "/v1/erlang/config?autosave=1" + self._gettime_url = self.url_base + "/v1/system/state" + self._geterlang_url = self.url_base + "/v1/erlang/config" + + def send_put_to_frontEnd(self, data): + return self.put_url_info(self._settime_url, data) + + def send_get_from_frontEnd(self): + return self.get_url_info(self._gettime_url) + + def get_erlang_status_from_frontEnd(self): + return self.get_url_info(self._geterlang_url) + + + else: + import requests + + class ConfigAPI: + + def __init__(self, ip='', port=''): + self.set_baiying_url = "http://127.0.0.1:9102/api/v1/dsa_config_cloud" + + 
def send_put_to_frontEnd(self, requestData): + try: + response = requests.put((self.set_baiying_url), data=(json.dumps(requestData)), verify=False, timeout=5) + if response.status_code != 200: + logger.error("set cloud failed, status_code:%s" % response.status_code) + return "failed" + results = json.loads(response.text) + if "result" in results: + if results["result"] == "ok": + logger.debug("set cloud success, results:%s" % results) + return "success" + logger.error("set cloud failed, results:%s" % results) + return "failed" + except Exception as e: + try: + logger.error("set cloud failed, results:%s" % e) + return "failed" + finally: + e = None + del e + + def send_get_from_frontEnd(self): + pass + + def get_erlang_status_from_frontEnd(self): + pass diff --git a/APPS_UNCOMPILED/src/common/Permission.py b/APPS_UNCOMPILED/src/common/Permission.py new file mode 100644 index 0000000..a287ab2 --- /dev/null +++ b/APPS_UNCOMPILED/src/common/Permission.py @@ -0,0 +1,15 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/common/Permission.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 147 bytes +""" +Permission +Created on 2020/12/01 +@author: Zhengyb +""" +RO = "ro" +WO = "wo" +RW = "rw" diff --git a/APPS_UNCOMPILED/src/common/Protocol.py b/APPS_UNCOMPILED/src/common/Protocol.py new file mode 100644 index 0000000..556a3f7 --- /dev/null +++ b/APPS_UNCOMPILED/src/common/Protocol.py @@ -0,0 +1,103 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/common/Protocol.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 2679 bytes +""" +ProtocolType +Created on 
2020/11/13 +@author: Zhengyb +""" +BacBip = "BACnet/IP" +BacMstp = "BACnet/MSTP" +MbRtu = "Modbus-RTU" +MbTcp = "Modbus-TCP" +MbAscii = "Modbus-Ascii" +MbRtuOverTcp = "Modbus-RTU-OverTcp" +MbRtuSlave = "Modbus-Rtu-Slave" +OpcUa = "OPC-UA" +OpcUa_PubSub = "OPC-UA-PUBSUB" +OpcDa = "OPC-DA" +Snap7 = "ISO-on-TCP" +Eip = "EtherNet/IP" +MC1C = "Mitsubishi MC 1C" +MC3C = "Mitsubishi MC 3C" +MC4C = "Mitsubishi MC 4C" +MC1E = "Mitsubishi MC 1E" +MC3E = "Mitsubishi MC 3E" +MCPS = "Mitsubishi CPU Port(Serial)" +MC3COT = "Mitsubishi MC 3C Over TCP" +VRCON = "Virtual Controller" +OMFT = "Omron FINS TCP" +OMFU = "Omron FINS UDP" +OMHLS = "Omron HL SERIAL" +SIPPI = "Siemens PPI" +EipPCCC = "EtherNet/IP(MicroLogix)" +DLT645_2007 = "DLT645-2007" +DLT645_1997 = "DLT645-1997" +PANMEW = "Panasonic Mewtocol" +IEC101 = "IEC101 Master" +IEC103 = "IEC103 Master" +IEC104 = "IEC104 Master" +TANCYV13 = "Tancy V1.3" +TATEKPROGRAM = "FatekProgram" +TATEKPROGRAM_OverTcp = "FatekProgramOverTcp" +Easycom = "Easy-Com" +SIFW = "Siemens-fw" +SIPPI_OverTcp = "Siemens PPI Over Tcp" +SI_WebApi = "Siemens WebApi" +MCR = "Melsec-MC-R" +MC3E_UDP = "Melsec-MC-3E-UDP" +MCFS_OverTcp = "Melsec Fx Serial Over Tcp" +MCFL = "Melsec Fx Links" +MCFL_OverTcp = "Melsec Fx Links Over Tcp" +OMHL_OverTcp = "Omron HL Over Tcp" +OMHLC = "Omron HL CMode" +OMHLC_OverTcp = "Omron HL CMode Over Tcp" +OMCN = "Omron CipNet" +OMCCN = "Omron ConnectedCipNet" +ABCC = "AllenBradley ConnectedCip" +ABMC = "AllenBradley MicroCip" +ABSLC = "AllenBradley SLC" +KEMC = "Keyence Mc" +KENano = "Keyence Nano" +KENano_OverTcp = "Keyence Nano Over Tcp" +PANMC = "Panasonic MC" +PANMEW_OverTcp = "Mewtocol Over Tcp" +BFADS = "Beckhoff ADS" +DTSerial = "Delta Serial" +DTAscii = "Delta Ascii" +DTTcp = "Delta Tcp" +DTSerial_OverTcp = "Delta Serial Over Tcp" +XJSerial = "XINJE Serial" +XJSerial_OverTcp = "XINJE Serial Over Tcp" +XJTcpNet = "XINJE TcpNet" +XJIN = "XINJE InternalNet" +VGSerial = "Vigor Serial" +VGSerial_OverTcp = "Vigor Serial Over 
Tcp" +FJSPB = "Fuji SPB" +FJSPB_OverTcp = "Fuji SPB Over Tcp" +FJSPHNet = "Fuji SPHNet" +GESRPT = "Ge SRPT" +YKGWLT = "Yokogawa Link Tcp" +DLT645_OverTcp = "DLT645 Over Tcp" +DLT698 = "DLT698" +IVSerial = "Inovance Serial" +IV_OverTcp = "Inovance Over TCP" +IV_TcpNet = "Inovance TcpNet" +EasyEthernet = "Easy-Ethernet" +CNCFS = "CNC FanucSerise0i" +Euromap63 = "Euromap-63" +DNP3Tcp = "DNP3 TCP" +DNP3Udp = "DNP3 UDP" +DNP3RTU = "DNP3 RTU" +RobotEfort = "ROBOT RobotEfort" +RobotAbb = "ROBOT RobotABB" +RobotFanuc = "ROBOT RobotFanuc" +Iec61850_MMS = "IEC61850-MMS" +CJT188_2004 = "CJT188-2004" +KeBaSocket = "KeBa-Socket" +Hj2122005_Serial = "HJ212-2005-Serial" +Hj212_Serial = "HJ212-2017-Serial" diff --git a/APPS_UNCOMPILED/src/common/Renjie.py b/APPS_UNCOMPILED/src/common/Renjie.py new file mode 100644 index 0000000..8a754e2 --- /dev/null +++ b/APPS_UNCOMPILED/src/common/Renjie.py @@ -0,0 +1,929 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/common/Renjie.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 41900 bytes +""" +Renjie +Created on 2022/01/26 +@author: licj +""" +import os, base64, struct, fcntl, datetime +from Crypto.Cipher import AES +from common.Logger import logger +from quickfaas.config import get +from quickfaas.measure import write_plc_values +from quickfaas.measure import recall +from quickfaas.remotebus import publish +from quickfaas.messagebus import publish as local_publish +from quickfaas.global_dict import get_global_parameter +from ftplib import FTP +import json +TAG_SET_READ_BACK = 10325 +TAG_SET_PARAM_SET = 10303 +TAG_SET_UPGRADE_SET = 10308 +ENCRYPT_TYPE_AES128 = 1 +ENCRYPT_TYPE_AES256 = 2 +ENCRYPT_TYPE_SHA1 = 4 +ENCRYPT_TYPE_SHA2 = 8 +DATA_TYPE_INT8 = 1 +DATA_TYPE_UINT8 = 2 +DATA_TYPE_INT16 = 3 +DATA_TYPE_UINT16 = 4 +DATA_TYPE_INT32 
= 5 +DATA_TYPE_UINT32 = 6 +DATA_TYPE_INT64 = 7 +DATA_TYPE_UINT64 = 8 +DATA_TYPE_FLOAT = 9 +DATA_TYPE_DOUBLE = 10 +DATA_TYPE_BOOl = 11 +DATA_TYPE_STRING = 12 +DATA_TYPE_BITSTRING = 13 +DATA_TYPE_DATETIME = 14 +DATA_TYPE_SIMPLE_DATATIME = 15 +DATA_TYPE_FAULT_STATUS = 16 +DATA_TYPE_FILE_DATA = 17 +DATA_TYPE_UINT8_ARRAY = 18 +DATA_TYPE_UINT16_ARRAY = 19 +DATA_TYPE_INT16_ARRAY = 20 +DATA_TYPE_INT8_ARRAY = 21 +DATA_TYPE_INT32_ARRAY = 22 +DATA_TYPE_UINT32_ARRAY = 23 +DATA_TYPE_INT64_ARRAY = 24 +DATA_TYPE_UINT64_ARRAY = 25 +DATA_TYPE_FLOAT_ARRAY = 26 +DATA_TYPE_DOUBLE_ARRAY = 27 +DATA_TYPE_BOOl_ARRAY = 28 +UPLOAD_STATUS_MALFUNCTION = 2003 +UPLOAD_STATUS_UPGRADE_SUCCESSFUL = 1004 +UPLOAD_STATUS_START_HANDLE = 3000 +UPLOAD_STATUS_UPGRADE_NOT_STARTED = 3001 +UPLOAD_STATUS_FTP_TRANSFER_COMPLETED = 3009 +Key1_encrypt_key = "Sun#0*2Tst223Kde" +Key2_encrypt_key = None +Key2_encrypt_key_file = "/var/user/app/device_supervisor/sign/renjie_key" +Key2_default_encrypt_key = "z416jm6dhh2wt6sd" +Local_upgrade_path = "/var/run/python/upgrade_file/" +Local_upgrade_file = None +Upload_info_file = "/var/run/python/upgrade_file/upload_upgrade.json" + +def swap_bytes(word_val): + msb = word_val >> 8 & 255 + lsb = word_val & 255 + return (lsb << 8) + msb + + +def calculate_crc(data): + CRC16table = (0, 49345, 49537, 320, 49921, 960, 640, 49729, 50689, 1728, 1920, + 51009, 1280, 50625, 50305, 1088, 52225, 3264, 3456, 52545, 3840, + 53185, 52865, 3648, 2560, 51905, 52097, 2880, 51457, 2496, 2176, + 51265, 55297, 6336, 6528, 55617, 6912, 56257, 55937, 6720, 7680, + 57025, 57217, 8000, 56577, 7616, 7296, 56385, 5120, 54465, 54657, + 5440, 55041, 6080, 5760, 54849, 53761, 4800, 4992, 54081, 4352, + 53697, 53377, 4160, 61441, 12480, 12672, 61761, 13056, 62401, 62081, + 12864, 13824, 63169, 63361, 14144, 62721, 13760, 13440, 62529, + 15360, 64705, 64897, 15680, 65281, 16320, 16000, 65089, 64001, + 15040, 15232, 64321, 14592, 63937, 63617, 14400, 10240, 59585, + 59777, 10560, 60161, 11200, 10880, 
59969, 60929, 11968, 12160, + 61249, 11520, 60865, 60545, 11328, 58369, 9408, 9600, 58689, 9984, + 59329, 59009, 9792, 8704, 58049, 58241, 9024, 57601, 8640, 8320, + 57409, 40961, 24768, 24960, 41281, 25344, 41921, 41601, 25152, + 26112, 42689, 42881, 26432, 42241, 26048, 25728, 42049, 27648, + 44225, 44417, 27968, 44801, 28608, 28288, 44609, 43521, 27328, + 27520, 43841, 26880, 43457, 43137, 26688, 30720, 47297, 47489, + 31040, 47873, 31680, 31360, 47681, 48641, 32448, 32640, 48961, + 32000, 48577, 48257, 31808, 46081, 29888, 30080, 46401, 30464, + 47041, 46721, 30272, 29184, 45761, 45953, 29504, 45313, 29120, + 28800, 45121, 20480, 37057, 37249, 20800, 37633, 21440, 21120, + 37441, 38401, 22208, 22400, 38721, 21760, 38337, 38017, 21568, + 39937, 23744, 23936, 40257, 24320, 40897, 40577, 24128, 23040, + 39617, 39809, 23360, 39169, 22976, 22656, 38977, 34817, 18624, + 18816, 35137, 19200, 35777, 35457, 19008, 19968, 36545, 36737, + 20288, 36097, 19904, 19584, 35905, 17408, 33985, 34177, 17728, + 34561, 18368, 18048, 34369, 33281, 17088, 17280, 33601, 16640, + 33217, 32897, 16448) + crc = 65535 + for c in data: + crc = crc >> 8 ^ CRC16table[(c ^ crc) & 255] + + return swap_bytes(crc) + + +def AES_key1_encrypt(data): + AESCipher = AES.new(Key1_encrypt_key, AES.MODE_ECB) + length = 16 + count = len(data) % length + if count > 0: + data = data + (255).to_bytes(1, "big") * (length - count) + return AESCipher.encrypt(data) + + +def AES_key2_encrypt(data): + global Key2_encrypt_key + if Key2_encrypt_key is None: + if os.path.exists(Key2_encrypt_key_file): + with open(Key2_encrypt_key_file, "r", encoding="utf-8") as fp: + Key2_encrypt_key = fp.read() + elif Key2_encrypt_key: + AESCipher = AES.new(Key2_encrypt_key, AES.MODE_ECB) + else: + AESCipher = AES.new(Key2_default_encrypt_key, AES.MODE_ECB) + length = 16 + count = len(data) % length + if count > 0: + data = data + (255).to_bytes(1, "big") * (length - count) + return AESCipher.encrypt(data) + + +def 
AES_key1_decrypt(data): + AESCipher = AES.new(Key1_encrypt_key, AES.MODE_ECB) + return AESCipher.decrypt(data) + + +def AES_key2_decrypt(data): + global Key2_encrypt_key + if Key2_encrypt_key is None: + if os.path.exists(Key2_encrypt_key_file): + with open(Key2_encrypt_key_file, "r", encoding="utf-8") as fp: + Key2_encrypt_key = fp.read() + elif Key2_encrypt_key: + AESCipher = AES.new(Key2_encrypt_key, AES.MODE_ECB) + else: + AESCipher = AES.new(Key2_default_encrypt_key, AES.MODE_ECB) + return AESCipher.decrypt(data) + + +def build_request_head(sn, encrypt_type, data_length, data_crc): + request_data = bytes() + sn_length = 24 + sn_data = sn.encode() + count = len(sn_data) + if count < sn_length: + sn_data = sn_data + (0).to_bytes(1, "big") * (sn_length - count) + else: + sn_data = sn_data[None[:sn_length]] + request_data += sn_data + request_data += encrypt_type.to_bytes(1, "big") + request_data += data_length.to_bytes(2, "big") + request_data += data_crc.to_bytes(2, "big") + return request_data + + +def build_request_tlv_dataParse error at or near `COME_FROM' instruction at offset 642_0 + + +def padding_package(package): + length = 16 + count = len(package) % length + if count > 0: + package = package + (255).to_bytes(1, "big") * (length - count) + return package + + +def build_measure_tlv_data(config_measure, measure_value, is_back_read=False): + tlv_data = bytes() + if is_back_read: + tag = int(config_measure["desc"]) + 1 + else: + tag = int(config_measure["desc"]) + if config_measure["transformType"] != 0: + tlv_data = build_request_tlv_data(tag, DATA_TYPE_FLOAT, 1, measure_value, 4) + else: + if config_measure["dataType"] == "BIT": + tlv_data = build_request_tlv_data(tag, DATA_TYPE_BOOl, 1, measure_value, 1) + else: + if config_measure["dataType"] == "BYTE": + tlv_data = build_request_tlv_data(tag, DATA_TYPE_UINT8, 1, measure_value, 1) + else: + if config_measure["dataType"] == "SINT": + tlv_data = build_request_tlv_data(tag, DATA_TYPE_INT8, 1, measure_value, 
1) + else: + if config_measure["dataType"] == "WORD": + tlv_data = build_request_tlv_data(tag, DATA_TYPE_UINT16, 1, measure_value, 2) + else: + if config_measure["dataType"] == "INT": + tlv_data = build_request_tlv_data(tag, DATA_TYPE_INT16, 1, measure_value, 2) + else: + if config_measure["dataType"] == "DWORD": + tlv_data = build_request_tlv_data(tag, DATA_TYPE_UINT32, 1, measure_value, 4) + else: + if config_measure["dataType"] == "DINT": + tlv_data = build_request_tlv_data(tag, DATA_TYPE_INT32, 1, measure_value, 4) + else: + if config_measure["dataType"] == "FLOAT": + tlv_data = build_request_tlv_data(tag, DATA_TYPE_FLOAT, 1, measure_value, 4) + else: + if config_measure["dataType"] == "STRING": + tlv_data = build_request_tlv_data(tag, DATA_TYPE_STRING, len(str(measure_value)), str(measure_value), len(str(measure_value))) + else: + if config_measure["dataType"] == "LONG": + tlv_data = build_request_tlv_data(tag, DATA_TYPE_INT64, 1, measure_value, 8) + else: + if config_measure["dataType"] == "ULONG": + tlv_data = build_request_tlv_data(tag, DATA_TYPE_UINT64, 1, measure_value, 8) + return tlv_data + + +def build_gw_tlv_data(type, gw_sn, device_type, device_id, device_name, setpara_ret=''): + tlv_data = bytes() + tlv_data += build_request_tlv_data(50001, DATA_TYPE_STRING, len(gw_sn), gw_sn, len(gw_sn)) + now_time = datetime.datetime.now() + device_current_time = bytes() + device_current_time += now_time.year.to_bytes(2, "big") + device_current_time += now_time.month.to_bytes(1, "big") + device_current_time += now_time.day.to_bytes(1, "big") + device_current_time += now_time.hour.to_bytes(1, "big") + device_current_time += now_time.minute.to_bytes(1, "big") + device_current_time += now_time.second.to_bytes(1, "big") + tlv_data += build_request_tlv_data(50002, DATA_TYPE_SIMPLE_DATATIME, 1, device_current_time, 7) + tlv_data += build_request_tlv_data(50003, DATA_TYPE_UINT8, 1, device_type, 1) + tlv_data += build_request_tlv_data(50004, DATA_TYPE_UINT8, 1, device_id, 
1) + if type != "FaultStatus": + tlv_data += build_request_tlv_data(50005, DATA_TYPE_STRING, len(device_name), device_name, len(device_name)) + if type == "GetDevProperties" or type == "SetPara": + tlv_data += build_request_tlv_data(50006, DATA_TYPE_STRING, len(setpara_ret), setpara_ret, len(setpara_ret)) + if type != "FaultStatus": + tlv_data += build_request_tlv_data(50008, DATA_TYPE_UINT16, 1, 1808, 2) + return tlv_data + + +global_controller_config = dict() + +def update_global_controller_config(): + global global_controller_config + global_config = get() + for controller in global_config["controllers"]: + global_controller_config[controller["name"]] = dict() + + for measure in global_config["measures"]: + global_controller_config[measure["ctrlName"]][measure["name"]] = measure + + +def build_message_tlv_data(upload_type, controller_name, measures): + SecondData = 0 + tlv_data = bytes() + if controller_name not in global_controller_config: + update_global_controller_config() + for measure_name, value in measures.items(): + if not controller_name not in global_controller_config: + if measure_name not in global_controller_config[controller_name]: + continue + else: + config_measure = global_controller_config[controller_name][measure_name] + if not config_measure["desc"].isdigit(): + continue + if upload_type == "SecondData": + if int(config_measure["desc"]) in (39004, 39005) and value["raw_data"]: + SecondData = 1 + tlv_data += build_measure_tlv_data(config_measure, value["raw_data"]) + + if upload_type == "SecondData": + if SecondData == 0: + return + return tlv_data + + +def build_upload_Logger_data(upload_type, controller_name, measures, setpara_ret=''): + """ + upload_type: 发布主题的类型(GwData/RunData/SecondData/GetDevProperties/FaultStatus) + controller_name: 控制器名称 + measures: 上传的测点列表 + setpara_ret: 设定参数的结果,测点号:50006(可选) + """ + tlv_data = bytes() + global_parameter = get_global_parameter() + device_name = controller_name.split("-")[0] + device_id = 
int(controller_name.split("-")[1]) + device_type = int(controller_name.split("-")[2]) + tlv_data += build_gw_tlv_data(upload_type, global_parameter["SN"], device_type, device_id, device_name, setpara_ret) + if upload_type != "GwData": + message_tlv_data = build_message_tlv_data(upload_type, controller_name, measures) + if message_tlv_data is None: + return + tlv_data += message_tlv_data + if upload_type == "GetDevProperties": + tlv_data += build_request_tlv_data(10402, DATA_TYPE_UINT16, 1, 10310, 2) + if upload_type == "FaultStatus": + tlv_data += build_request_tlv_data(10516, DATA_TYPE_UINT16, 1, 1, 2) + tlv_data += build_request_tlv_data(23017, DATA_TYPE_UINT16, 1, 2, 2) + tlv_data = padding_package(tlv_data) + crc = calculate_crc(tlv_data) + head = build_request_head(global_parameter["SN"], ENCRYPT_TYPE_AES128, len(tlv_data), crc) + return head + AES_key2_encrypt(tlv_data) + + +def build_Logger_FaultMerge_data(controller_name, value): + tlv_data = bytes() + array_number = 1 + tlv_data += array_number.to_bytes(2, "little") + data_length = 17 + tlv_data += data_length.to_bytes(2, "little") + device_type = int(controller_name.split("-")[2]) + tlv_data += device_type.to_bytes(1, "little") + device_id = int(controller_name.split("-")[1]) + tlv_data += device_id.to_bytes(1, "little") + now_time = datetime.datetime.now() + device_current_time = bytes() + device_current_time += now_time.year.to_bytes(2, "big") + device_current_time += now_time.month.to_bytes(1, "big") + device_current_time += now_time.day.to_bytes(1, "big") + device_current_time += now_time.hour.to_bytes(1, "big") + device_current_time += now_time.minute.to_bytes(1, "big") + device_current_time += now_time.second.to_bytes(1, "big") + tlv_data += device_current_time + data_number = 2 + tlv_data += data_number.to_bytes(2, "big") + logger.info("FaultMerge value:%s" % value) + data1 = value & 65535 + data2 = value >> 16 + tlv_data += data1.to_bytes(2, "big") + tlv_data += data2.to_bytes(2, "big") + 
reason_number = 0 + tlv_data += reason_number.to_bytes(2, "little") + tlv_data = padding_package(tlv_data) + crc = calculate_crc(tlv_data) + head = build_request_head(get_global_parameter()["SN"], ENCRYPT_TYPE_AES128, len(tlv_data), crc) + return head + AES_key2_encrypt(tlv_data) + + +def check_package_validity(message, pc_password=False): + encrypt_type = int.from_bytes(message[0[:1]], "big") + if encrypt_type != ENCRYPT_TYPE_AES128: + logger.warn("Not Supported encrypt_type(%s)" % encrypt_type) + return False + else: + data_crc = int.from_bytes(message[3[:5]], "big") + if pc_password: + crc = calculate_crc(AES_key1_decrypt(message[5[:None]])) + else: + crc = calculate_crc(AES_key2_decrypt(message[5[:None]])) + if data_crc != crc: + logger.warn("Data CRC mismatching, Recive:0X%04X, Our:0X%04X" % (data_crc, crc)) + return False + return True + + +def parse_tlv_data(type, number, data): + length = 0 + tlv_data = bytes() + if type in [DATA_TYPE_INT8, DATA_TYPE_UINT8]: + length = 1 * number + tlv_data = int.from_bytes(data[0[:length]], "little") + else: + if type in [DATA_TYPE_INT16, DATA_TYPE_UINT16]: + length = 2 * number + tlv_data = int.from_bytes(data[0[:length]], "little") + else: + if type in [DATA_TYPE_INT32, DATA_TYPE_UINT32]: + length = 4 * number + tlv_data = int.from_bytes(data[0[:length]], "little") + else: + if type in [DATA_TYPE_INT64, DATA_TYPE_UINT64]: + length = 8 * number + tlv_data = int.from_bytes(data[0[:length]], "little") + else: + if type == DATA_TYPE_FLOAT: + length = 4 * number + tlv_data = struct.unpack("> 10 + if type < DATA_TYPE_INT8 or type > 28: + length_to_handle += 4 + continue + number = length & 1023 + message_dict[data_label] = dict() + message_dict[data_label]["type"] = type + message_dict[data_label]["number"] = number + tlv_length, message_dict[data_label]["data"] = parse_tlv_data(type, number, data[(length_to_handle + 4)[:None]]) + length_to_handle += 4 + tlv_length + + return message_dict + + +def 
set_gateway_current_time(msg_dict): + if 52000 not in msg_dict: + return + year = int.from_bytes(msg_dict[52000]["data"][0[:2]], "big") + month = int.from_bytes(msg_dict[52000]["data"][2[:3]], "big") + day = int.from_bytes(msg_dict[52000]["data"][3[:4]], "big") + hour = int.from_bytes(msg_dict[52000]["data"][4[:5]], "big") + minute = int.from_bytes(msg_dict[52000]["data"][5[:6]], "big") + second = int.from_bytes(msg_dict[52000]["data"][6[:7]], "big") + current_time = f"{year}-{month}-{day} {hour}:{minute}:{second}" + logger.info("Cloud set gateway current time:%s" % current_time) + os.system('date -s "{current_time}" | rtc'.format(current_time=current_time)) + + +def handle_GetDevProperties(message): + if not check_package_validity(message): + return + message_dict = parse_package_to_dict(message) + logger.info("GetDevProperties tlv data:%s" % message_dict) + + +def find_controller_by_slaveId(controllers, slaveId): + for controller in controllers: + if int(controller["args"]["slaveAddr"]) == slaveId: + return controller + + +def find_recall_measure_value(recall_measures, name): + for measure in recall_measures: + if measure["name"] == name: + return measure["value"] + + +global_controller_config2 = dict() + +def update_global_controller_config2(): + global global_controller_config2 + global_config = get() + for controller in global_config["controllers"]: + global_controller_config2[controller["name"]] = dict() + + for measure in global_config["measures"]: + if measure["addr"].isdigit(): + if int(measure["addr"][None[:1]]) != 4: + continue + global_controller_config2[measure["ctrlName"]][int(measure["addr"][1[:None]])] = measure + + +def build_readback_data(msg_dict): + status_code = 1 + tlv_data = bytes() + config = get() + controller = find_controller_by_slaveId(config["controllers"], int(msg_dict[10301]["data"][0])) + if controller is None: + logger.warn("Not found controllers by slaveId:%s" % int(msg_dict[10301]["data"][0])) + return (0, None) + if 
controller["name"] not in global_controller_config2: + update_global_controller_config2() + tags = msg_dict[10323]["data"] + recall_measures = recall([{'name':controller["name"], 'measures':[]}])[0]["measures"] + for i in range(0, len(tags), 2): + if not controller["name"] not in global_controller_config2: + if int(tags[i + 1]) not in global_controller_config2[controller["name"]]: + continue + measure = global_controller_config2[controller["name"]][int(tags[i + 1])] + measure_value = find_recall_measure_value(recall_measures, measure["name"]) + if measure_value is None: + continue + tlv_data += build_request_tlv_data(tags[i], DATA_TYPE_UINT16, 1, measure_value, 2) + tlv_data += build_request_tlv_data(tags[i] + 1, DATA_TYPE_UINT16, 1, 1, 2) + + return ( + status_code, tlv_data) + + +def find_write_measure_response_item(write_response, controller_name, measure_name): + for item in write_response: + if item["device"] == controller_name and item["var_name"] == measure_name: + return item + + +def build_param_set_data(msg_dict): + status_code = 1 + modbus_addr = int(msg_dict[10301]["data"][0]) + tags = msg_dict[10323]["data"] + if modbus_addr == 0: + tlv_data = None + write_request = {'protocol':"Modbus-RTU", + 'registers':[]} + for i in range(0, len(tags), 2): + register = {'function':3, + 'address':(tags[i + 1]) - 1, 'length':1, 'value':str(base64.b64encode(msg_dict[tags[i]]["data"].to_bytes(2, "little")), "utf-8")} + write_request["registers"].append(register) + + logger.info("Broadcast write request:%s" % write_request) + local_publish("ds2/eventbus/south/modbus/broadcast/write", write_request) + else: + tlv_data = bytes() + config = get() + controller = find_controller_by_slaveId(config["controllers"], modbus_addr) + if controller is None: + logger.warn("Not found controllers by slaveId:%s" % modbus_addr) + return (0, None) + if controller["name"] not in global_controller_config2: + update_global_controller_config2() + write_request = {(controller["name"]): {}} + 
for i in range(0, len(tags), 2): + if not controller["name"] not in global_controller_config2: + if int(tags[i + 1]) not in global_controller_config2[controller["name"]]: + continue + measure = global_controller_config2[controller["name"]][int(tags[i + 1])] + write_request[controller["name"]][measure["name"]] = msg_dict[tags[i]]["data"] + + logger.info("Write request:%s" % write_request) + write_plc_values(write_request) + for i in range(0, len(tags), 2): + tlv_data += build_request_tlv_data(tags[i], DATA_TYPE_UINT16, 1, msg_dict[tags[i]]["data"], 2) + tlv_data += build_request_tlv_data(tags[i] + 1, DATA_TYPE_UINT16, 1, 1, 2) + + return ( + status_code, tlv_data) + + +def build_publish_upgrade_tlv_data(request_id, schedule, status, journal_num): + global_config = get() + global_parameter = get_global_parameter() + tlv_data = bytes() + total_upgrade_id = list() + if status == UPLOAD_STATUS_UPGRADE_SUCCESSFUL: + setpara_ret = "1" + journal_num + else: + setpara_ret = "0" + journal_num + tlv_data += build_request_tlv_data(50000, DATA_TYPE_STRING, len("Inhands getway"), "Inhands getway", len("Inhands getway")) + tlv_data += build_request_tlv_data(10402, DATA_TYPE_UINT16, 1, TAG_SET_UPGRADE_SET, 2) + if request_id == 0: + tlv_data += build_gw_tlv_data("SetPara", global_parameter["SN"], 32, request_id, global_config["controllers"][0]["name"].split("-")[0], setpara_ret) + for controller in global_config["controllers"]: + upgrade_id = controller["args"]["slaveAddr"] + total_upgrade_id.append(upgrade_id) + upgrade_schedule_status = "6_" + str(schedule) + "_" + str(status) + tlv_data += build_request_tlv_data(upgrade_id, DATA_TYPE_STRING, len(upgrade_schedule_status), upgrade_schedule_status, len(upgrade_schedule_status)) + + tlv_data += build_request_tlv_data(request_id, DATA_TYPE_UINT8_ARRAY, len(total_upgrade_id), total_upgrade_id, 1) + else: + controller = find_controller_by_slaveId(global_config["controllers"], request_id) + if controller is None: + logger.warn("Not 
found controllers by slaveId:%s" % request_id) + logger.error("Failed to publish slaveId(%d) upgrade info to Renjie platform." % request_id) + return + tlv_data += build_gw_tlv_data("SetPara", global_parameter["SN"], 32, request_id, controller["name"].split("-")[0], setpara_ret) + upgrade_schedule = "6_" + str(schedule) + upgrade_status = "6_" + str(status) + tlv_data += build_request_tlv_data(1, DATA_TYPE_STRING, len(upgrade_schedule), upgrade_schedule, len(upgrade_schedule)) + tlv_data += build_request_tlv_data(2, DATA_TYPE_STRING, len(upgrade_status), upgrade_status, len(upgrade_status)) + tlv_data = padding_package(tlv_data) + crc = calculate_crc(tlv_data) + head = build_request_head(global_parameter["SN"], ENCRYPT_TYPE_AES128, len(tlv_data), crc) + upload_data = head + AES_key2_encrypt(tlv_data) + publish("Logger/SetPara", upload_data) + + +def upload_upgrade_data(msg_dict, wait_request_id, schedule, status): + global Upload_info_file + if msg_dict is not None and wait_request_id == -1: + journal_num = str(msg_dict[52005]["data"]) + request_addr = list(msg_dict[10301]["data"]) + for request_id in request_addr: + build_publish_upgrade_tlv_data(request_id, schedule, status, journal_num) + + else: + if msg_dict is None and wait_request_id != -1: + try: + upload_fd = open(Upload_info_file, "r") + except FileNotFoundError: + logger.warn("File(%s) is not found." 
% Upload_info_file) + return + else: + upload_fd.seek(0, 2) + filesize = upload_fd.tell() + upload_fd.seek(0, 0) + upload_info = json.loads(upload_fd.read(filesize)) + upload_fd.close() + journal_num = upload_info["journal_num"] + build_publish_upgrade_tlv_data(wait_request_id, schedule, status, journal_num) + else: + logger.error("Parameter passing is irregular.") + + +def connect_ftp_server(msg_dict): + ftp_ip = str(msg_dict[1]["data"]) + ftp_port = int(msg_dict[2]["data"]) + ftp_user, ftp_password = str(msg_dict[3]["data"]).split("/", 1) + ftp = FTP() + ftp.set_debuglevel(2) + try: + ftp.connect(ftp_ip, ftp_port, 60) + except Exception as e: + try: + logger.error("Connect to ftp server(%s:%d) failed: %s." % (ftp_ip, ftp_port, e)) + return + finally: + e = None + del e + + else: + ftp.login(ftp_user, ftp_password) + logger.info("%s" % ftp.getwelcome()) + return ftp + + +def download_upgrade_file(msg_dict): + global Local_upgrade_file + global Local_upgrade_path + ftp = connect_ftp_server(msg_dict) + if ftp is None: + return False + ftp_path_file = str(msg_dict[6]["data"]) + ftp_path, ftp_file = os.path.split(ftp_path_file) + ftp.cwd(ftp_path) + filesize = ftp.size(ftp_file) + if not os.path.exists(Local_upgrade_path): + os.makedirs(Local_upgrade_path) + Local_upgrade_file = Local_upgrade_path + ftp_file + try: + file_handle = open(Local_upgrade_file, "wb") + except OSError: + logger.error("Create local upgrade file(%s) failed." 
% Local_upgrade_file) + ftp.set_debuglevel(0) + ftp.quit + return False + else: + ftp.retrbinary("RETR %s" % ftp_file, file_handle.write, filesize) + upload_upgrade_data(msg_dict, -1, 0, UPLOAD_STATUS_FTP_TRANSFER_COMPLETED) + ftp.set_debuglevel(0) + ftp.quit + return True + + +def create_upload_info_file(msg_dict): + config = get() + if not config: + return False + else: + request_addr = list(msg_dict[10301]["data"]) + journal_num = str(msg_dict[52005]["data"]) + sub_info = {'journal_num':journal_num, + 'devices':[]} + if request_addr[0] == 0: + controller_name = config["controllers"][0]["name"] + device_name = controller_name.split("-")[0] + sub_info["devices"].append({'device_id':0, 'device_name':device_name}) + else: + request_addr_count = len(request_addr) + for i in range(0, request_addr_count): + controller = find_controller_by_slaveId(config["controllers"], request_addr[i]) + if controller is None: + logger.warn("Not found controllers by slaveId:%s" % request_addr[i]) + return False + controller_name = controller["name"] + device_name = controller_name.split("-")[0] + device_id = int(controller_name.split("-")[1]) + sub_info["devices"].append({'device_id':device_id, 'device_name':device_name}) + + try: + with open(Upload_info_file, "w", encoding="utf-8") as upload_fd: + fcntl.flock(upload_fd, fcntl.LOCK_EX) + json.dump(sub_info, upload_fd) + except OSError: + logger.error("Create file(%s) failed." 
% Upload_info_file) + return False + else: + return True + + +def build_upgrade_set_data(msg_dict): + resoult = download_upgrade_file(msg_dict) + if resoult is False: + upload_upgrade_data(msg_dict, -1, 0, UPLOAD_STATUS_MALFUNCTION) + return + resoult = create_upload_info_file(msg_dict) + if resoult is False: + upload_upgrade_data(msg_dict, -1, 0, UPLOAD_STATUS_MALFUNCTION) + return + request_addr = list(msg_dict[10301]["data"]) + upgrade_requese = {'upgrade_file': Local_upgrade_file, 'endpoint': '"rs485"', + 'protocol': '"Modbus-RTU"', 'slave_id': request_addr} + local_publish("ds2/eventbus/system/upgrade/robot", upgrade_requese) + + +def handle_SetPara(message): + if not check_package_validity(message): + return + message_dict = parse_package_to_dict(message) + logger.info("SetPara data:%s" % message_dict) + set_gateway_current_time(message_dict) + status_code = 0 + tlv_data = bytes() + global_parameter = get_global_parameter() + if TAG_SET_READ_BACK in message_dict: + if message_dict[TAG_SET_READ_BACK]["data"] != 0: + logger.warn("Query read back command invalid!") + return + tlv_data += build_request_tlv_data(10402, DATA_TYPE_UINT16, 1, TAG_SET_READ_BACK, 2) + status_code, response_data = build_readback_data(message_dict) + else: + if TAG_SET_PARAM_SET in message_dict: + if message_dict[TAG_SET_PARAM_SET]["data"] != 0: + logger.warn("Query set command invalid!") + return + tlv_data += build_request_tlv_data(10402, DATA_TYPE_UINT16, 1, TAG_SET_PARAM_SET, 2) + status_code, response_data = build_param_set_data(message_dict) + else: + if TAG_SET_UPGRADE_SET in message_dict: + if message_dict[TAG_SET_UPGRADE_SET]["data"] != 0: + logger.warn("Query set upgrade invalid!") + return + build_upgrade_set_data(message_dict) + return + logger.info("Unknow set command") + return + if response_data is None: + if status_code != 1: + logger.warn("Unable build response_data") + return + tlv_data += response_data + setpara_ret = str(status_code) + message_dict[52005]["data"] + 
tlv_data += build_gw_tlv_data("SetPara", global_parameter["SN"], 9, 247, "Logger1000", setpara_ret) + value_50010 = "0000000000000000011000001101011100000000000000000000000000000111" + tlv_data += build_request_tlv_data(50010, DATA_TYPE_STRING, len(value_50010), value_50010, len(value_50010)) + tlv_data += build_request_tlv_data(50011, DATA_TYPE_STRING, len("Logger1000"), "Logger1000", len("Logger1000")) + if 10323 in message_dict: + tlv_data += build_request_tlv_data(10323, DATA_TYPE_UINT16_ARRAY, message_dict[10323]["number"], message_dict[10323]["data"], 2) + tlv_data = padding_package(tlv_data) + crc = calculate_crc(tlv_data) + head = build_request_head(global_parameter["SN"], ENCRYPT_TYPE_AES128, len(tlv_data), crc) + logger.info("SetPara response(Logger/SetPara)") + publish("Logger/SetPara", head + AES_key2_encrypt(tlv_data)) + + +def build_upload_upgrade_data(): + try: + with open(Upload_info_file, "r", encoding="utf-8") as upload_fd: + fcntl.flock(upload_fd, fcntl.LOCK_EX) + upload_info = json.load(upload_fd) + except FileNotFoundError: + logger.warn("File(%s) is not found." 
% Upload_info_file) + return + else: + journal_num = upload_info["journal_num"] + devices = upload_info["devices"] + for device in devices: + device_id = device["device_id"] + device_name = device["device_name"] + try: + schedule_status = device["schedule_status"] + except KeyError: + logger.warn("No upgrade has been started from Station %d" % device_id) + upload_upgrade_data(None, device_id, 0, UPLOAD_STATUS_START_HANDLE) + return + else: + tlv_data = bytes() + global_parameter = get_global_parameter() + total_upgrade_id = list() + upgrade_suc_count = 0 + upgrade_sum = len(schedule_status) + for j in range(0, upgrade_sum): + upgrade_info = schedule_status[j] + upgrade_id = upgrade_info["upgrade_id"] + schedule = upgrade_info["schedule"] + status = upgrade_info["status"] + if 1004 == status: + upgrade_suc_count += 1 + if upgrade_id == device_id: + upgrade_schedule = "6_" + str(schedule) + upgrade_status = "6_" + str(status) + tlv_data += build_request_tlv_data(1, DATA_TYPE_STRING, len(upgrade_schedule), upgrade_schedule, len(upgrade_schedule)) + tlv_data += build_request_tlv_data(2, DATA_TYPE_STRING, len(upgrade_status), upgrade_status, len(upgrade_status)) + else: + total_upgrade_id.append(upgrade_id) + upgrade_schedule_status = "6_" + str(schedule) + "_" + str(status) + tlv_data += build_request_tlv_data(upgrade_id, DATA_TYPE_STRING, len(upgrade_schedule_status), upgrade_schedule_status, len(upgrade_schedule_status)) + + if device_id == 0: + tlv_data += build_request_tlv_data(device_id, DATA_TYPE_UINT8_ARRAY, len(total_upgrade_id), total_upgrade_id, 1) + elif upgrade_suc_count == upgrade_sum: + setpara_ret = "1" + journal_num + else: + setpara_ret = "0" + journal_num + tlv_data += build_gw_tlv_data("SetPara", global_parameter["SN"], 32, device_id, device_name, setpara_ret) + tlv_data += build_request_tlv_data(50000, DATA_TYPE_STRING, len("Inhands getway"), "Inhands getway", len("Inhands getway")) + tlv_data += build_request_tlv_data(10402, DATA_TYPE_UINT16, 1, 
TAG_SET_UPGRADE_SET, 2) + tlv_data = padding_package(tlv_data) + crc = calculate_crc(tlv_data) + head = build_request_head(global_parameter["SN"], ENCRYPT_TYPE_AES128, len(tlv_data), crc) + return head + AES_key2_encrypt(tlv_data) + + +def set_key2_encrypt_key(msg_dict): + global Key2_encrypt_key + if 52003 not in msg_dict: + return + Key2_encrypt_key = msg_dict[52003]["data"] + logger.info("Cloud config Key2_encrypt_key:%s" % Key2_encrypt_key) + try: + with open(Key2_encrypt_key_file, "w", encoding="utf-8") as fp: + fp.write(Key2_encrypt_key) + except Exception as error: + try: + logger.error("Write Key2_encrypt_key failed(%s)" % error) + finally: + error = None + del error + + +def handle_PC_Password(message): + if not check_package_validity(message, True): + return + message_dict = parse_package_to_dict(message, True) + logger.info("PC_Password tlv data:%s" % message_dict) + set_key2_encrypt_key(message_dict) + + +def handle_GMT8_Time(message): + if not check_package_validity(message): + return + message_dict = parse_package_to_dict(message) + logger.info("GMT8_Time tlv data:%s" % message_dict) \ No newline at end of file diff --git a/APPS_UNCOMPILED/src/common/SendType.py b/APPS_UNCOMPILED/src/common/SendType.py new file mode 100644 index 0000000..3561750 --- /dev/null +++ b/APPS_UNCOMPILED/src/common/SendType.py @@ -0,0 +1,15 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/common/SendType.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 174 bytes +""" +SendType +Created on 2020/12/01 +@author: Zhengyb +""" +Periodic = "periodic" +OnChange = "onChange" +Never = "never" diff --git a/APPS_UNCOMPILED/src/common/ServiceID.py b/APPS_UNCOMPILED/src/common/ServiceID.py new file mode 100644 index 0000000..0adfeac --- /dev/null +++ 
b/APPS_UNCOMPILED/src/common/ServiceID.py @@ -0,0 +1,27 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/common/ServiceID.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 720 bytes +MASTER_SERVICE_ID = 1 +SYSLOG_NG_SERVICE_ID = 2 +ERLANG_MQTT_SERVICE_ID = 1000 +MQTT_AGENT_SERVICE_ID = 1001 +BAIYING_AGENT_SERVICE_ID = 1000 +DATA_HUB_SERVICE_ID = 1010 +QUICK_FAAS_SERVICE_ID = 1011 +OPCUA_SERVER_SERVICE_ID = 1012 +IEC104_CLIENT_SERVICE_ID = 1013 +IEC104_SLAVE_SERVICE_ID = 1014 +IEC101_SLAVE_SERVICE_ID = 1015 +SL651_SLAVE_SERVICE_ID = 1016 +HJ212_CLIENT_SERVICE_ID = 1017 +MODBUSTCP_SLAVE_SERVICE_ID = 1018 +MODBUSRTU_SLAVE_SERVICE_ID = 1019 +BACNET_SERVER_SERVICE_ID = 1020 +DNP3_SERVER_SERVICE_ID = 1021 +IEC61850_SERVER_SERVICE_ID = 1022 +SNMP_AGENT_SERVICE_ID = 1023 +MINDSPHEREPUT_SERVICE_ID = 1024 diff --git a/APPS_UNCOMPILED/src/common/SouthConfig.py b/APPS_UNCOMPILED/src/common/SouthConfig.py new file mode 100644 index 0000000..9297e05 --- /dev/null +++ b/APPS_UNCOMPILED/src/common/SouthConfig.py @@ -0,0 +1,129 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/common/SouthConfig.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 4949 bytes +""" +SouthConfig +Created on 2020/12/01 +@author: Zhengyb +""" +import json +import common.Controller as Controller +import common.MeasurePoint as MeasurePoint +import common.MeasureGroup as MeasureGroup +import common.AlarmPolicy as AlarmPolicy +import common.ConfigMisc as ConfigMisc +ENDPOINT_CONFILE_PREFIX = "/etc/python/ds2/" + +class SouthConfig(object): + + def __init__(self): + self.controllers = list() + 
self.groups = list() + self.alarms = list() + self.misc = None + self.endpoints = list() + + def import_from_json(self, filename): + """ + 从filename导入配置 + """ + with open(filename, mode="r") as fp: + lines = fp.read() + config_data = json.loads(lines) + if "controllers" not in config_data: + raise ValueError("cannot find 'controllers' attribute in %s" % filename) + if "measures" not in config_data: + raise ValueError("cannot find 'measures' attribute in %s" % filename) + if "groups" not in config_data: + raise ValueError("cannot find 'groups' attribute in %s" % filename) + if "misc" not in config_data: + raise ValueError("cannot find 'misc' attribute in %s" % filename) + self.controllers = list() + for con_data in config_data["controllers"]: + con = Controller() + con.load_json_obj(con_data) + self.controllers.append(con) + if con.endpoint not in self.endpoints: + self.endpoints.append(con.endpoint) + + if "measures" not in config_data: + raise ValueError("cannot find 'measures' attribute in %s" % filename) + self.groups = list() + for grp_data in config_data["groups"]: + grp = MeasureGroup() + grp.load_json_obj(grp_data) + self.groups.append(grp) + + for mp_data in config_data["measures"]: + mp = MeasurePoint() + mp.load_json_obj(mp_data) + con = self.find_controller(mp.ctrlName) + if not con: + print("%s is not found in the controller list" % mp.ctrlName) + continue + con.add_measure_point(mp) + + self.alarms = list() + for ap_data in config_data["alarms"]: + ap = AlarmPolicy() + ap.load_json_obj(ap_data) + self.alarms.append(ap) + + self.misc = ConfigMisc() + self.misc.load_json_obj(config_data["misc"]) + + def export_json(self, indent=None, filename=None): + """ + 返回配置的json字符串,如果给定filename参数,则写入文件 + """ + config_data = dict() + config_data["controllers"] = list() + config_data["groups"] = list() + config_data["measures"] = list() + config_data["alarms"] = list() + config_data["misc"] = dict() + for con in self.controllers: + con_data = con.dump_json_obj() + 
config_data["controllers"].append(con_data) + for mp in con.measure_points: + mp_data = mp.dump_json_obj() + config_data["measures"].append(mp_data) + + for grp in self.groups: + grp_data = grp.dump_json_obj() + config_data["groups"].append(grp_data) + + for ap in self.alarms: + grp_data = ap.dump_json_obj() + config_data["alarms"].append(grp_data) + + config_data["misc"] = self.misc.dump_json_obj() + config_str = json.dumps(config_data, sort_keys=True, indent=indent) + if filename: + with open(filename, "w") as fp: + fp.write(config_str) + return config_str + + def export_endpoint_config(self, indent=None, filename=None): + pass + + def add_controller(self, con): + pass + + def del_controller(self, con): + pass + + def find_controller(self, con_name): + for con in self.controllers: + if con_name == con.name: + return con + + def get_controllers(self, skip, size): + """ + 返回控制器列表 + """ + pass diff --git a/APPS_UNCOMPILED/src/common/TriggerType.py b/APPS_UNCOMPILED/src/common/TriggerType.py new file mode 100644 index 0000000..2900dd1 --- /dev/null +++ b/APPS_UNCOMPILED/src/common/TriggerType.py @@ -0,0 +1,18 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/common/TriggerType.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 338 bytes +""" +Drivers +Created on 2021/1/6 +@author: Lius +""" +MESSAGE_EVENT_MESSAGE = "message_event" +TIMER_EVENT_MESSAGE = "timer_event" +DSA_START_EVENT_MESSAGE = "dsa_start_event" +MEASURE_EVENT_MESSAGE = "measure_event" +WARNING_EVENT_MESSAGE = "warning_event" +COMMAND_EVENT_MESSAGE = "command_event" diff --git a/APPS_UNCOMPILED/src/common/Utilities.py b/APPS_UNCOMPILED/src/common/Utilities.py new file mode 100644 index 0000000..0164458 --- /dev/null +++ b/APPS_UNCOMPILED/src/common/Utilities.py @@ -0,0 +1,154 @@ +# uncompyle6 
version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/common/Utilities.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 5347 bytes +import sys, json, time, re +from .Logger import logger +from urllib import parse +import yaml + +class Utilities: + __slots__ = [] + + @staticmethod + def serialize_instance(obj): + d = {"__classname__": (obj.__class__.__name__)} + d.update(vars(obj)) + return d + + @staticmethod + def get_timezone_str(ts): + t = time.gmtime(ts) + timeStr = time.strftime("%Y-%m-%dT%H:%M:%SZ", t) + return timeStr + + @staticmethod + def get_time_str(ts): + t = time.localtime(ts) + time_str = time.strftime("%Y-%m-%d %H:%M:%S", t) + return time_str + + @classmethod + def unicode_convertParse error at or near `LOAD_DICTCOMP' instruction at offset 32 + + @classmethod + def json_to_obj(cls, j_str): + if sys.version > "3": + return json.loads(j_str) + return cls.unicode_convert(json.loads(j_str)) + + @staticmethod + def dict_to_obj(d): + + class Obj(object): + __slots__ = d.keys() + + def __init__(self, **entries): + for k, v in entries.items(): + setattr(self, k, v) + + return Obj(**d) + + @staticmethod + def payload_transfer(payload): + """ + transfer payload to json format + :param payload: eg: query:cursor=0&limit=50&id=95cf7625cf073a56b2bd0a4f356e1c59 + :return: + { + "cursor" = "0" + "limit" = "50" + "id"= "95cf7625cf073a56b2bd0a4f356e1c59" + } + + """ + try: + if not isinstance(payload, str): + payload = payload.decode() + else: + salt = "query:" + if salt == payload: + return "" + back_data = dict() + if salt in payload: + payload = parse.unquote(payload) + data = payload.split(salt)[1] + if "&" in data: + data_list = data.split("&") + for d in data_list: + kv = d.split("=") + k = str(kv[0]) + v = kv[1] + back_data[k] = v + + return back_data + kv = data.split("=") 
+ k = str(kv[0]) + v = kv[1] + back_data[k] = v + logger.debug("payload: %s ,trans to : %s" % (payload, back_data)) + return back_data + else: + back_data = yaml.safe_load(payload) + return back_data + except Exception as e: + try: + logger.error("payload transfer error, payload: %s, error: %s" % (payload, e)) + finally: + e = None + del e + + @staticmethod + def linkCheckout(target_host, target_port): + import socket + try: + s = socket.socket() + s.settimeout(1) + status = s.connect_ex((target_host, target_port)) + s.close() + if status == 0: + logger.debug("Network Connection OK.") + return True + except Exception as e: + try: + logger.debug("Link checkout except: %s" % e) + finally: + e = None + del e + + logger.warn("Network connection failed. host: %s, port: %s" % (target_host, target_port)) + return False + + @staticmethod + def get_plc_value_before_data_operation(value, var): + ret_value = value + if var.data_operation != 0: + if not isinstance(value, (str, bool)): + if not re.match("bit", var.data_type, re.M | re.I): + try: + if var.data_operation == 1: + if value > var.max_scale: + ret_value = var.max_scale + else: + if value < var.min_scale: + ret_value = var.min_scale + ret_value = (value - var.minScale) / var.limit_scale + var.minValue + else: + if var.data_operation == 2: + ret_value = (value - var.offset) / var.gain + elif isinstance(value, float): + ret_value = float(ret_value) + else: + ret_value = int(ret_value) + except Exception as e: + try: + ret_value = None + logger.error("An exception occurred during calculation: %s" % e.__str__()) + finally: + e = None + del e + + return ret_value \ No newline at end of file diff --git a/APPS_UNCOMPILED/src/common/__init__.py b/APPS_UNCOMPILED/src/common/__init__.py new file mode 100644 index 0000000..1e343e5 --- /dev/null +++ b/APPS_UNCOMPILED/src/common/__init__.py @@ -0,0 +1,7 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 
2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/common/__init__.py +# Compiled at: 2024-04-18 03:12:55 +pass diff --git a/APPS_UNCOMPILED/src/common/__pycache__/InternalTopic.cpython-312.pyc b/APPS_UNCOMPILED/src/common/__pycache__/InternalTopic.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..516f2adbbac3827d54e6eef863848b32e88cacf1 GIT binary patch literal 4981 zcmbuD$#WCg8NemG!HmJIHpZ6ZMY4_Mg=ZVgK7f`+>L5!(3yTf&mh5ooCZ1R%aACq`a;qio6pIBL1Sw`@0x&IEQDjTA7eIg~Oa{u2f({#mlV80$} zz4>f_-yc1^r{smWJ=k07<$G~Y6T~G7sF%s; zLO58O!lBYMo+{1Y>C$;TQ<}x+OJO|Q{d4!!7L#6}~x)WM0|?LbAdsstl$tRbE!Wq(dM##JbEkM-VSn zx%zq+Z$pTy3R`L&sSOO%Y-zj@)|-Q9T@uyCp4FphomU){<9md|kO+s-HYV3Y-p-N1 zuOa#?NdVW|twdd-;1@;Utd;x#5*rnjLp2^Q7Tg>H;A9oVXq)7i=4$-wW&l+-V23>J zESmX4?Rwag*tH;G>t3tAQe<340^Bu{0_XV>I)*D(dA4rNCaQ`sRg29N zXj^RX5@%gztN<6oSQlHlf#w~99yH&ab?LwcrL8Zz4|Hm}ow*aWF56B1i=NzhIjU|~ z!>IB7IxernhF^$iJEllg`$q9Qf(@RNlrlUXxuui#%s^Oj$+Yd-_s!f_RlvG+FJ9kJ zl!jI7&1J+j8rJn>e=4X@X44raMw?R|DtS8h03>X_5Gh#6GU>!Vv@)O-?C?eBQjv7R z9dEirCz4_w7t?Gp8;`)2Aho+pw}6RUrkLHMkpbL(Ru@2$x5Q8o`DSwWR#Lc0- zs}FNbfi_3GF0(m0pEdn^WtnWO7-84wLNXIK&$XrYX|BgN=*y^)Yj~WDv#AVpLZ^+D1QEGTyk!%o$;>e6yBX8f;w@>8zF(yC1=b$(R*QF* z%2C_N&2^4@JQeL6zwBctSJ!~=@!`&SrlUYsYiH7VxCh8^w36$j75B1FE?K3PfDSSthZ%w1OhpY@RKqW0|!q zlcM7Yo_pb8xmJTOD!abodkeOuPG3 zUwEofVWRUypMPTM1h4(PrUw~~8aJkIYPU1`bVj3shHqKBw5I#kH0qq;U(r^xx<9K? 
zBgW(v?fNr)5?qcMp=&>9^w2$xnlQ#!wW}F@98CSj>@6+wT%UbzWoDT&x*yz+7}Gbj zTaNn~2z6hdxvxOw0v5u>a(iF z%Sr9tbA1_NT`=Y%T6{~N+tR4>#>7L73K(Oc?=vPITO4{|<+)I?_HS2h25J{F>^$eo zRxK8JjhZw3i4URo!u#@X!Uy5w_n&@O()}gvX{9>hk5`ajv7AtL+^X$ee^fx1M~6Tr{p8u_egtE)qPb`$QysIybZq%{u+K4{$>8t zt+eh>Yxj$~AMzaezvB8AG?di+NpeJRbUA%8XB|;sQCrIEfxMO5%w_Ewug}09L&jK1 zr+m^arxnA!W*^I9vBI*_lCvm%YZ1}84P+j^L}z%`3QoDUK71INh#y#&{Rlb!Ubo zyCN#hqrqt!f|3+aEg2+7wrf}RugFQ9CjFI4{m~3wG^-ImEYk19@y)T%$Ko_puD zfw7yagShwHd(S!d&Yg3ekFzemm%6U1LpLw}qavHnva<_MNx$uVMB ztx2u64cl?2U_8ksQw+O7M`8H+C?6h6jf7dDEgVyqrj|CXu|5dI9KjL8 zBul`XQ!M!^32*LZZBRQ{4{L|o$$D7_)GqYq!v!3bD9Kl*(kDsQ1w5W~0YfXUzSPJF z$H(+}5Q_u;(T=TOhu%42lAI(Ws2@Kp0WEF!2>2ql8@&Oxkj;kUnnKAZ6_IJ?7eOa_baG zfyb)PJ*4XNakB=gQoZ#*Ng3Z7TbuDcSUYIkqHAOw&@#SenqZs*-Hc6f2_nx$#}(Jf z6h9sn!PEVOQZSJmO{QK*GUbW``KIEFrIJEwjEkI#CfPBLS7_w6MyrtJPE3qMPI6Ik zg6EV1@YIobazyc-hzjvoM=E(TKBCx#w1B2#Qxl>>YZ7=;#XZ7_s-GzZ5hM&VI6J!z z9_>+_7-bYn01kvViW5sJj(GB9N^x>LpW+oa_hO7YEyhzxrQn4qpG0#z5TQ^yyeQOo z^u-ftkrNaudirz(Lyk>x2;A679vQxXyeJg<*N?+T1&$ZO$#^Uk?o7od#<`>@gnQ!R zlM^SxPYyud6dG@Ud_#H9nq7hSe8pIh|Ih zNF<(&i;+mir*-Nb$RdR=p_n3aWr1nOo9Cg>&h3Z-@ua?W3r1GYVWmhXMRopD!1sdqpEy20xW}gGnlcVSv`V1{oRT5P- z&z?&xB(fBf+fp%ieD=7!rBT|_h}p^lbA7XYa#_7pR*x64iKC+GTyP;MSA?aCFmM#X zYE_XYsjLZ$lvmD8&Q8kZJEZa*&_U+{L8uCIff}{iHtqOz;X1r35i<}F8U~iW42_Lg z3SN-Ui-h61tX2fZ3_TKReW;qW<)I46k{RfG349?@kjJEvlXf-_Kht)`G4U4|@`Uo9 zum14P+Up--b6lHww^p!4SBLh1BhjUt9a+9c=&m_dmr?aChcD_~yo&>iUcoX|9?- zWyXOP@insW0mi|HpjJHKQ4w_>r351I3Gip#%Yj+(qH!RQ1A)vokimjHLUHWkXhcBY z5A79?U{LXb05~r;_e@D2ZlJ04KEx#KhhmDjOA{4f6JQ}=q`osgWa|`|YK=tIEIPAP zL#|}&{L@lN?Q{X|4@gvCzI<`p3RTC~fd=b{{3nRKU`3XZjxVvcCzdqrB~6IvjqsCj z>6BR~jF@fqO_HJ!ITJ=?G{?;d4G3&>Tdy%;#3+L?e?>+tGDpp|BfPiDXOR7~4(tUs z(eM|m2xxi^_7`&!gG!Veq(MyD%{9OZETa59@&QAGNtl{&gv#uE>J+DWLr9A~p;$B- zN+rkAp%XB~i6Mww(B?uYF>Qg6RuE-b9`a8J8mZW*m_E&Ag7-^8Gg~1IJxaq>x zvPw0yERNqSdwANNC)*@ZO&2S&)Gp{#BNcXkhaB1^g?6n_538=89|Vw7qV{?nz}f%l z>ZUVTxVo7Hvo_X##i1XPAk8+?<4GJ`z^x5UtaH+qffn=`Iz=O0O;|W^q7>d{!vk() 
zK#DoUx}e_zVDN!WgRLI`F-A&gYR+4sks_L-rc;UrJXyK9h+G3dy2L$Uq)O*B>kTks zk-{fX5Zds-j%?^bO?`fn02px(WEcb^5Qk96x<7uz{{|7{DTY)i?!~wuMpC0Z`q~N^ z{Tc=c#;!Qvpv&QO@*qwpA<2!6PMq#YO(aF8m`C3SV}xQ$0R)XlM>#gm3rbPn;ht{D z5hH^`UERIMc=UdJ4Jwdz@H?SUN{3QnRD|rc8`i10KL!pSeKUSH;wjQzBci%K{|FjE zn8ELXLI(?^9S3P4p5gdMp|t@Z80a!0bY7^(te?Z6h7iGJZ_q%9gIT%*Aq3@_@t*O% zJFwVst!{c*qrl~KqflA zFn)d8T{}^AfLtfYs{J_Ey@ld`g+x`z)OLy5etyq|{Y(3^RI33X=aVXR@4MeoTs;k-3ifP#5Xpd0;5HtYAX$}P-@kF)6?4o~Jn4N9LMoDS|etlUz79fc9A zSu8-wT8B9cj9Y-!{daY=6C9=C9&51pM%HtfYe>TzF}4X7Ho=p3WC&@Qw3d|5{O-$l zFW&y>t+g9dYcsQ7U3=xLtM8%2{p$N`uYYpqy=!Y9y^H9x7qKIvV{Q|3`@>(Yz5N#O zf~cW-fT4LW%%$cZg-7t&-5Mx(bStm{9h5?z8&5rNKnvPF#*TA)#igDBRIET&uP60< z6ErdvS}w8qq|gJyf{Ga5QUqO+^OefJ+GSsD&i}w_?SpddUa5BP&Dwp_omn~rP$JVI zi4Ms$Bhk#2#;eVjnzQuY)j*9LU;sC+JajX#XSz3!uUVp-FUElf!BF3sKAEnS=-S0j zxo)piw|9kp^j2xPT-vx?+6WUGJPi`vuqeunk4cS>W$AWU)J*0~=4{Wo{)PT!ACvQ! z&C#>8?5~&n^^5F<#8P6#-wJ@_e{eHmAkW8$F^x|bA_@fffz03VVf_}4KQ#$w&QaJj zzWDxUiI>Ra5b)YI@min(@U{c5eG{(*A^>j#@H#f}TBHuZ>;G@7#P^$qZ`{Lotdf|# za}`J#_fq=q!43tj`z=$VJZ)(d)|~_V!=MU>My24H149G718`r|H8ga1i18|(xDZbY zVl)}!;Oa@_)i7&F@O?fQ1-clJouyDjG{n|HnObFlAo^a4HWyj2>Q?^!Obh1iHpaklGLd2rFaT+Xb9 z8ZY>kd<)K7m7&F+<;oqabvrKfFZC~U-3Fp>sc*h3w`D7Y)eW~gT$ZZCxx5ysWx7hD zt7IBGz{CpO3`R{?VWS>^w8p`wQX(9Z>~J&D%>Z^~+^ubUT6V%;YupCb3_TRwl8NZp z&ZDC`>?K@aq5HB(MDau->ODWubR_b#i75UBAQItgftBBZibhnR`~o9K-o6h)-z5Ss z1@9)0919nPKnO_WRbt&y?A*TI;BnTk2i(rx>krV*U4IW2JKOFy5H$_w#umoDBTAet zsurX`vBQr742duC7_W+qepo!ZfowUYp2!3q#T@SAFgylv(7|UGpvZKFN(N0vzm*I{OhDdekeXk?`k}?CMaBAg znaTP3F8Rr&xv6<2#rp1KrR3|CoY`; literal 0 HcmV?d00001 diff --git a/APPS_UNCOMPILED/src/drivers/Drivers.py b/APPS_UNCOMPILED/src/drivers/Drivers.py new file mode 100644 index 0000000..9302b76 --- /dev/null +++ b/APPS_UNCOMPILED/src/drivers/Drivers.py @@ -0,0 +1,430 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: 
/var/user/app/device_supervisorbak/device_supervisor/src/drivers/Drivers.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 19181 bytes +""" +Drivers +Created on 2020/12/28 +@author: Lius +""" +import os, time, sys, json, signal, logging, argparse, libevent, random, string +from urllib import parse +from datetime import datetime +from common import Protocol +from common.Logger import logger +from common.MQClient import MQClientLibevent +import common.InternalTopic as InternalTopic +from common.InternalPath import EC_SYSTEM_INFO +VAR_RUN_PYTHON_DS2_PATH = "/var/run/python/ds2" +DRIVER_PUBLISH_MSEC_MSG_TOPIC = "ds2/eventbus/south/upload/msec/data/{controllerName}" +DATAHUB_STATUS_FILE_PATH = "/var/run/python/ds2/datahub_status.json" +DATAHUB_STATUS_NOT_START = "not_start" +DATAHUB_STATUS_START_INIT = "start_init" +DATAHUB_STATUS_LOADING_CONFIG = "loading_config" +DATAHUB_STATUS_RELOADING_CONFIG = "reloading_config" +DATAHUB_STATUS_INIT_FINISH = "init_finish" +POLLING_SECOND = 1 +POLLING_MILLISECOND = 2 +POLLING_SECOND2 = 3 + +class Drivers(object): + + def __init__(self, service_id, config_file): + self.base = libevent.Base() + self.service_id = service_id + self.config_file = config_file + self.devices = dict() + self.cont_acquisition = 1 + self.coms = list() + self.mqclient = None + self.cfg_info = dict() + self.get_datahub_status_event = libevent.Timer((self.base), (self.get_datahub_status_timer), userdata=None) + self._Drivers__register_signal() + + def __register_signal(self): + signal.signal(signal.SIGINT, self._Drivers__signal_handler) + signal.signal(signal.SIGTERM, self._Drivers__signal_handler) + + def delete_pid_to_file(self, service_id): + pid_file_path = VAR_RUN_PYTHON_DS2_PATH + "/" + "drvr.py-" + service_id + if os.path.exists(pid_file_path): + os.remove(pid_file_path) + + def __signal_handler(self, signalnum, handler): + logger.info("signal. 
signalnum: %s" % signalnum) + self.base.loopexit(0) + for dev_name in self.devices: + self.devices[dev_name].disconnect() + + self.delete_pid_to_file(str(self.service_id)) + sys.exit() + + def load_config(self): + if not os.path.exists(self.config_file): + raise ValueError("File {} does not exist".format(self.config_file)) + json_config = dict() + with open((self.config_file), "r", encoding="utf-8-sig") as fd: + json_config = json.load(fd) + return json_config + + def get_device_instance(self, dev): + if dev["protocol"] == Protocol.Snap7: + from .S7Driver import S7Driver + s7 = S7Driver(dev, self.cont_acquisition, self.base, self.print_debug_log) + s7.publish_measures_data = self.publish_measures_data + return s7 + if dev["protocol"] == Protocol.OpcDa: + from .OpcDaDriver import OpcDaDriver + opcda = OpcDaDriver(dev, 1, self.base, self.print_debug_log) + opcda.publish_measures_data = self.publish_measures_data + return opcda + if dev["protocol"] == Protocol.OpcUa: + from .OpcUaDriver import OpcUaDriver + opcua = OpcUaDriver(dev, self.cont_acquisition, self.base) + opcua.publish_measures_data = self.publish_measures_data + return opcua + if dev["protocol"] == Protocol.Eip: + from .EnIPDriver import EnIPDriver + enip = EnIPDriver(dev, self.cont_acquisition, self.base) + enip.publish_measures_data = self.publish_measures_data + return enip + if "Mitsubishi" in dev["protocol"]: + self._get_serial_instance() + from .MCDriver import MCDriver + mc = MCDriver(dev, self.cont_acquisition, self.base, self.coms) + mc.publish_measures_data = self.publish_measures_data + return mc + raise ValueError("Unsupported protocol: %s" % dev["protocol"]) + + def update_device_instance(self, dev): + try: + if dev["name"] not in self.devices: + self.devices[dev["name"]] = self.get_device_instance(dev) + except Exception as error: + try: + logger.error("get device instance failed(%s)" % error) + finally: + error = None + del error + + def update_measures_to_device_instance(self, measures): + 
if not isinstance(measures, list): + measures = [ + measures] + logger.debug("Loading measures...") + measures_table = {} + for tag in measures: + try: + device_name = tag["ctrlName"] + if "deleted" in tag: + continue + if device_name not in self.devices: + raise KeyError("Not found device: %s" % tag["ctrlName"]) + logger.debug("Loading measure(%s) in controller(%s)" % (tag["name"], tag["ctrlName"])) + if device_name not in measures_table: + measures_table[device_name] = list() + measures_table[device_name].append(tag) + except Exception as error: + try: + logger.error("Loading measure failed(%s)" % error) + finally: + error = None + del error + + for device_name in measures_table: + self.devices[device_name].update_variables(measures_table[device_name]) + + logger.debug("measures load success") + + def _get_serial_instance(self): + import serial + from mobiuspi_lib.serial import Serial as IG_serial + ig_serial = IG_serial() + for com in self.coms: + com["port"] = ig_serial.get_serial_path(com["name"]) + com["serial_ins"] = serial.Serial(port=(com["port"]), baudrate=(com["baud"]), + bytesize=(com["bits"]), + parity=(com["parityChk"].upper()), + stopbits=(com["stopbits"]), + xonxoff=0) + + def get_datahub_service_status(self): + if not os.path.exists(DATAHUB_STATUS_FILE_PATH): + return DATAHUB_STATUS_NOT_START + json_config = dict() + with open(DATAHUB_STATUS_FILE_PATH, "r", encoding="utf-8-sig") as fd: + json_config = json.load(fd) + if "status" in json_config: + return json_config["status"] + + def get_datahub_status_timer(self, evt, userdata): + if self.get_datahub_service_status() != DATAHUB_STATUS_INIT_FINISH: + self.get_datahub_status_event.add(1) + return + for dev in self.devices.values(): + if dev.polling_maps: + dev.read_evt_timer.add(0.1) + if dev.msec_polling_maps: + if dev.enable_msec_sample: + dev.msec_read_evt_timer.add(0.1) + if dev.cust_polling_maps and dev.enable_cust_sample: + dev.cust_read_evt_timer.add(0.1) + + def run_driver(self, cfg): + 
self.cfg_info = cfg + client_id = cfg["protocol"] + "@" + "".join(random.sample(string.ascii_letters + string.digits, 8)) + self._init_mqclient(client_id, cfg["controllers"][0]) + if "misc" in cfg: + self.coms = cfg["misc"]["coms"] + upper_level = cfg["misc"]["logLvl"].upper() + if upper_level == "DEBUG": + logger.set_level(logging.DEBUG) + else: + if upper_level == "INFO": + logger.set_level(logging.INFO) + else: + if upper_level == "WARN" or upper_level == "WARNING": + logger.set_level(logging.WARN) + else: + if upper_level == "ERROR": + logger.set_level(logging.ERROR) + if "controllers" in cfg: + for dev in cfg["controllers"]: + dev["debug"] = 0 + dev["debug_heartbeat"] = 0 + self.update_device_instance(dev) + self.mqclient.add_sub(InternalTopic.DATAHUB_RECALL_MEASURE.format(CtrlName=(dev["name"])), self.on_partner_recall_value) + + if "measures" in cfg: + self.update_measures_to_device_instance(cfg["measures"]) + self.get_datahub_status_event.add(1) + self.base.loop() + + def publish_measures_data(self, data, pollingType, ctrlName): + if pollingType == POLLING_MILLISECOND: + measures_payload = data + self.publish_data(DRIVER_PUBLISH_MSEC_MSG_TOPIC.format(controllerName=ctrlName), measures_payload, pollingType) + else: + measures_payload = {} + measures_payload["controllers"] = [ + data] + self.publish_data(InternalTopic.EVENT_BUS_SOUTH_READ.format(driverServiceId=(self.service_id)), measures_payload, pollingType) + + def publish_data(self, topic, message, pollingType): + try: + if self.mqclient.is_ready(): + message = message if isinstance(message, (str, bytes)) else json.dumps(message) + if pollingType == POLLING_MILLISECOND: + logger.debug("Polling MilliSecond message, topic:%s, payload length:%s Bytes" % (topic, len(message))) + else: + if pollingType == POLLING_SECOND: + logger.debug("Polling Second cycle (1) message, topic:%s, payload length:%s Bytes" % (topic, len(message))) + else: + if pollingType == POLLING_SECOND2: + logger.debug("Polling Second cycle 
(2) message, topic:%s, payload length:%s Bytes" % (topic, len(message))) + return self.mqclient.publish(topic, message, qos=0) + return False + except Exception as e: + try: + logger.error("Publish message error. %s" % e) + finally: + e = None + del e + + def on_partner_write_value(self, topic, payload): + """This function is partner write PLC data.""" + try: + is_self = False + payload = json.loads(payload) + for ctrl in payload["payload"]: + if ctrl["name"] in self.devices: + logger.debug("Write measure message: %s" % payload) + is_self = True + for measure in ctrl["measures"]: + if "error_code" in measure: + continue + err_code, params, value = self.devices[ctrl["name"]].write_variables_value(measure) + measure["error_code"] = err_code + measure["error_reason"] = params + if measure["error_code"] != 0: + measure["value"] = value + + if is_self: + resp_topic = InternalTopic.EVENT_BUS_SOUTH_WRITE_RESP.format(requestServiceId=(self.service_id)) + logger.debug("Write measure result. topic: %s. payload: %s." % (resp_topic, payload)) + self.publish_data(resp_topic, payload, False) + except Exception as e: + try: + logger.error("Write measure error. %s" % e) + finally: + e = None + del e + + def on_partner_recall_value(self, topic, payload): + """This function is partner recall PLC data.""" + try: + is_self = False + payload = json.loads(payload) + ctrlName = payload["controllerName"] + response = {'task_id':payload["task_id"], 'controllerName':ctrlName, 'measures':[]} + if ctrlName in self.devices: + logger.debug("Recall measure message: %s" % payload) + is_self = True + readMeas = self.devices[ctrlName].recall_variables_value(payload) + if readMeas: + for readMea in readMeas: + response["measures"].append({'name':readMea["name"], + 'health':readMea["health"], 'value':readMea["value"], + 'timestamp':readMea["timestamp"]}) + + if is_self: + resp_topic = InternalTopic.DATAHUB_RECALL_MEASURE_RESPONSE.format(CtrlName=ctrlName) + logger.debug("Recall measure result. 
topic: %s. payload: %s." % (resp_topic, response)) + self.publish_data(resp_topic, response, False) + except Exception as e: + try: + logger.error("Recall measure error. %s" % e) + finally: + e = None + del e + + def get_system_uptime_in_s(self): + return time.clock_gettime(time.CLOCK_MONOTONIC) + + def on_sys_service_status_ping(self, topic, payload): + """This function is ping.""" + try: + payload = json.loads(payload) + haveAbormal = False + if "message" in payload: + if payload["message"] == "ping": + resp_topic = InternalTopic.EVENT_BUS_SYSTEM_SERVICE_STATUS_PONG + for dev in self.devices.values(): + if dev.polling_maps: + if dev.first_scan1_finish: + if not dev.scan1_is_running: + maxTimeout = 3 * dev.sample_period if 3 * dev.sample_period >= 60 else 60 + if int(self.get_system_uptime_in_s() - dev.last_scan1_time) > maxTimeout: + haveAbormal = True + if dev.enable_cust_sample and dev.cust_polling_maps and dev.first_scan2_finish: + maxTimeout = dev.scan2_is_running or (3 * dev.sample_period2 if 3 * dev.sample_period2 >= 60 else 60) + if int(self.get_system_uptime_in_s() - dev.last_scan2_time) > maxTimeout: + haveAbormal = True + + if haveAbormal: + payload = json.dumps({'message':"abnormal", 'ServiceId':self.service_id}) + else: + payload = json.dumps({'message':"pong", 'ServiceId':self.service_id}) + logger.debug("Driver response service status topic:%s, payload:%s" % (resp_topic, payload)) + self.publish_data(resp_topic, payload, False) + except Exception as error: + try: + logger.error("Driver response service status failed(%s)" % error) + finally: + error = None + del error + + def print_debuglog_tofile(self, controller, logsize, logpath, message): + if not os.path.exists(logpath): + return + debug_log_file = "%s/%s_%s.log" % (logpath, controller["protocol"], self.service_id) + if controller["debugLogNum"] >= logsize / 2: + cmd = "mv %s %s.1" % (debug_log_file, debug_log_file) + os.system(cmd) + controller["debugLogNum"] = 0 + else: + try: + with 
open(debug_log_file, "a") as file: + file.write("%s\n" % message) + controller["debugLogNum"] += 1 + except Exception as error: + try: + logger.warn("Write debugLog failed(%s)" % error) + finally: + error = None + del error + + def get_file_lines(self, filename): + file_number = 0 + try: + with open(filename, "r") as file: + while file.readline(): + file_number += 1 + + except Exception: + file_number = 0 + + return file_number + + def _get_default_device_name(self): + for controller in self.cfg_info["controllers"]: + return controller + + def print_debug_log(self, direction, message): + controller = self._get_default_device_name() + if not controller is None: + if not controller["debug"]: + if not controller["enableDebug"]: + return + if isinstance(message, bytes): + message = str(message, "utf-8") + debug_log_message = "<%s><%s><%s> %s" % ( + datetime.now().strftime("%Y-%m-%d %H:%M:%S"), controller["endpoint"], "TX" if direction else "RX", message) + if controller["debug"]: + if round(self.get_system_uptime_in_s()) - controller["debug_heartbeat"] > 90: + controller["debug"] = 0 + else: + topic = InternalTopic.DEBUG_LOG_PKGDBG_INFO.format(CtrlName=(controller["name"])) + self.publish_data(topic, debug_log_message, False) + if controller["enableDebug"]: + self.print_debuglog_tofile(controller, self.cfg_info["misc"]["debugLogSize"], self.cfg_info["misc"]["debugLogPath"], debug_log_message) + + def driver_pkgdbg_heartbeat_msg_callback(self, topic, payload): + controller = self._get_default_device_name() + payload = json.loads(payload) + controller["debug_heartbeat"] = round(self.get_system_uptime_in_s()) + + def driver_pkgdbg_config_msg_callback(self, topic, payload): + controller = self._get_default_device_name() + payload = json.loads(payload) + if payload["action"] == "start": + controller["debug"] = 1 + controller["debug_heartbeat"] = round(self.get_system_uptime_in_s()) + else: + controller["debug"] = 0 + resp_topic = topic.replace("httpreq", "httprsp") + 
payload = json.dumps({"result": "ok"}) + self.publish_data(resp_topic, payload, False) + + def _init_mqclient(self, client_id, controller): + if not self.mqclient: + self.mqclient = MQClientLibevent(self.base, client_id) + if os.path.exists(EC_SYSTEM_INFO): + self.mqclient.set_mq_info(client_id, target_username="admin", target_passwd="admin") + self.mqclient.init_mqclient() + self.mqclient.add_sub(InternalTopic.EVENT_BUS_SOUTH_WRITE.format(requestServiceId="+"), self.on_partner_write_value) + self.mqclient.add_sub(InternalTopic.EVENT_BUS_SYSTEM_SERVICE_STATUS_PING, self.on_sys_service_status_ping) + controller["debugLogNum"] = self.get_file_lines("%s/%s_%s.log" % ( + self.cfg_info["misc"]["debugLogPath"], controller["protocol"], self.service_id)) + self.mqclient.add_sub(InternalTopic.DEBUG_LOG_PKGDBG_HEARTBEAT.format(CtrlName=(controller["name"])), self.driver_pkgdbg_heartbeat_msg_callback) + self.mqclient.add_sub(InternalTopic.DEBUG_LOG_PKGDBG_CONFIG.format(CtrlName=parse.quote(("%s" % controller["name"].encode("utf-8").decode("utf-8")), safe="-_.~!()*'")), self.driver_pkgdbg_config_msg_callback) + self.mqclient.connect() + + +def main(argv=sys.argv): + ap = argparse.ArgumentParser(description="Drivers Server") + ap.add_argument("-s", "--service_id", action="store", required=True, help="Service id.") + ap.add_argument("-c", "--config", action="store", required=True, help="Config file path.") + args = ap.parse_args(argv[1[:None]]) + print(args.service_id) + print(args.config) + dev = Drivers(args.service_id, args.config) + cfg = dev.load_config() + dev.run_driver(cfg) + + +if __name__ == "__main__": + main() diff --git a/APPS_UNCOMPILED/src/drivers/EnIPDriver.py b/APPS_UNCOMPILED/src/drivers/EnIPDriver.py new file mode 100644 index 0000000..b312b53 --- /dev/null +++ b/APPS_UNCOMPILED/src/drivers/EnIPDriver.py @@ -0,0 +1,291 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) 
+# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/drivers/EnIPDriver.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 10426 bytes +""" +Drivers +Created on 2021/1/2 +@author: Lius +""" +import time, math, libevent +from pycomm3 import LogixDriver +from common.Logger import logger +from collections import namedtuple +from common.DataType import String, Float +from common.DataType import ENIP_DATA_TYPE_MAPS +FLOAT_REPR_DEFAULT = 2 + +class EnIPDriver: + + def __init__(self, device_param, cont_acquisition, libevent_base): + self.device_param = device_param + self.acquisition_mode = cont_acquisition + self.sample_period = device_param["samplePeriod"] + self.expired = device_param["expired"] + self._publish_measures_data = None + self.variables = dict() + self.polling_maps = list() + self.device = self.get_device_instance(self.device_param) + self.libevent_base = libevent_base + self.read_evt_timer = libevent.Timer((self.libevent_base), (self.read_timer), userdata=None) + + def get_device_instance(self, dev): + try: + logger.info("### create device %s. ###" % dev) + endpoint = dev["endpoint"].split(":") + dev["ip"] = endpoint[0] + dev["port"] = int(endpoint[1]) + device = LogixDriver((dev["ip"]), port=(int(dev["port"]))) + logger.info("### create OK. ###") + except Exception as e: + try: + logger.error("### create error. 
%s ###" % e) + device = None + finally: + e = None + del e + + return device + + @property + def publish_measures_data(self): + return self._publish_measures_data + + @publish_measures_data.setter + def publish_measures_data(self, func): + self._publish_measures_data = func + + def read_timer(self, evt, userdata): + try: + logger.info("### Regularly polling: %s | ctrl name: %s ###" % ( + self.sample_period, self.device_param["name"])) + timestamp = time.time() + values = dict({'timestamp':int(timestamp), 'name':self.device_param["name"], 'version':self.device_param["version"], + 'health':(self.get_device_connect_status)(), + 'measures':list()}) + try: + start_time = time.time() + values["measures"] = self.read_variables_value(int(timestamp)) + end_time = time.time() + logger.debug("get data time:%s (device:%s)" % ( + end_time - start_time, self.device_param["name"])) + except Exception as e: + try: + logger.warn("### Collect error:%s (ctrl name:%s) ###" % ( + e, self.device_param["name"])) + finally: + e = None + del e + + if self._publish_measures_data: + values["health"] = self.get_device_connect_status() + self._publish_measures_data(values) + else: + use_time = time.time() - timestamp + if use_time > self.sample_period or abs(use_time - self.sample_period) < 0.2: + self.read_evt_timer.add(float(use_time + 0.2)) + else: + self.read_evt_timer.add(float(self.sample_period)) + except Exception as e: + try: + logger.warn("### Timer error. %s ###" % e) + self.read_evt_timer.add(float(self.sample_period)) + finally: + e = None + del e + + logger.info("### Collect End ###\n") + + def get_device_connect_status(self): + isconnect = 0 + if self.device: + isconnect = int(self.device._connection_opened) + return int(isconnect) + + def _reconnect(self): + try: + self.disconnect() + self.device = self.get_device_instance(self.device_param) + if self.device: + return True + except Exception as e: + try: + self.device = None + logger.error("### Reconnect error. 
%s ###" % e) + finally: + e = None + del e + + return False + + def disconnect(self): + try: + if self.device: + self.device.close() + return True + except Exception as e: + try: + logger.error("### Disconnect error. %s ###" % e) + return False + finally: + e = None + del e + + def __parsing_var__(self, var): + if "unit" in var: + del var["unit"] + if "desc" in var: + del var["desc"] + if "sendType" in var: + del var["sendType"] + var["symbol"] = var["addr"] + var["decimal"] = int(var["decimal"]) if ("decimal" in var and var["decimal"]) else 2 + if "dataType" not in var: + var["dataType"] = None + return (namedtuple("eip_var", var.keys()))(*var.values()) + + def update_variables(self, tags): + """ + tags=[var1, var2, var3] + """ + for tag in tags: + tag_name = tag["name"] + try: + _variable = self.__parsing_var__(tag) + self.variables[tag_name] = _variable + except Exception as e: + try: + logger.error("Update measure:%s (device:%s) error:%s" % (tag_name, self.device_param["name"], e)) + finally: + e = None + del e + + def del_all_variable(self): + self.variables.clear() + self.polling_maps = list() + + def convert_data(self, value, dtype): + if dtype == String: + if not isinstance(value, str): + return str(value) + else: + return value + if dtype == Float: + if not isinstance(value, float): + return "0.0" + return str(value) + return isinstance(value, int) or 0 + return value + + def read_variables_value(self, timestamp): + is_read = True + measures = list() + for tag_name, tag in self.variables.items(): + try: + if not is_read: + continue + elif not self.device: + if self._reconnect() is False: + is_read = False + raise ValueError("EtherNet/IP is not connected.") + else: + logger.debug("Execute(EtherNet/IP) -> symbol: %s" % tag.symbol) + _, value, dtype, err = self.device.read(tag.symbol) + if dtype == "REAL" or dtype == "LREAL": + if math.isnan(float(value)): + value = "NaN" + else: + value = round(value, tag.decimal if tag.decimal else FLOAT_REPR_DEFAULT) + 
else: + if dtype == "BOOL": + value = 1 if value else 0 + else: + if dtype in ('SINT', 'INT', 'DINT', 'LINT', 'USINT', 'UINT', + 'UDINT'): + pass + else: + try: + if isinstance(value, bytes): + raise ValueError("Not support bytes data.") + value = str(value)[None[:tag.len]] + except Exception as e: + try: + logger.error("str() error: %s, raw data: %s", (e, value)) + value = "" + finally: + e = None + del e + + if err is not None: + raise ValueError(err) + value = self.convert_data(value, tag.dataType) + measures.append({'rawbytes':"", 'value':value, 'health':1, 'timestamp':timestamp, + 'name':tag_name, 'dataType':ENIP_DATA_TYPE_MAPS[dtype]}) + continue + except Exception as e: + try: + logger.error("Read error [EtherNet/IP] @ %s (symbol:%s)" % (e.__str__(), tag.symbol)) + if "Invalid tag request" not in e.__str__(): + if self._reconnect() is False: + is_read = False + finally: + e = None + del e + + measures.append({'rawbytes': '""', 'value': '""', 'health': 0, + 'timestamp': timestamp, 'name': tag_name, 'dataType': '""'}) + + return measures + + def write_variables_value(self, msg): + err_code = 0 + params = "" + if msg["name"] in self.variables: + var = self.variables[msg["name"]] + if "rw" in var.readWrite: + try: + if not self.get_device_connect_status(): + err_code = 1 + params = self.device_param["name"] + raise ValueError("Device %s is not accessible." 
% self.device_param["name"]) + else: + logger.info("Find Name: %s| Write value: %s| Symbol:%s" % ( + var.name, msg["value"], var.symbol)) + _, _, data_type, err = self.device.read(var.symbol) + if not err: + if data_type == "REAL" or data_type == "LREAL": + msg["value"] = round(float(msg["value"]), var.decimal if var.decimal else FLOAT_REPR_DEFAULT) + else: + if data_type == "BOOL": + msg["value"] = True if msg["value"] == 1 else False + else: + if data_type in ('SINT', 'INT', 'DINT', 'LINT', + 'USINT', 'UINT', 'UDINT', 'ULINT'): + msg["value"] = int(msg["value"]) + _, value, data_type, err = self.device.write((var.symbol, msg["value"])) + logger.debug("%s: %-10s == %s %s %s" % (time.ctime(), var.symbol, value, data_type, err)) + else: + return ( + 1, err) + return (err_code, params, None) + except Exception as e: + try: + err_code = 1 + params = e.__str__() + logger.error("[EtherNet/IP] Write (%s) error : %s" % (var.name, params)) + finally: + e = None + del e + + else: + err_code = 1 + params = var.name + logger.warn("The variable %s does not support write operations." 
% var.name) + else: + err_code = 1 + logger.error("Variable does not exist.") + return ( + err_code, params, None) diff --git a/APPS_UNCOMPILED/src/drivers/MCDriver.py b/APPS_UNCOMPILED/src/drivers/MCDriver.py new file mode 100644 index 0000000..bb177dd --- /dev/null +++ b/APPS_UNCOMPILED/src/drivers/MCDriver.py @@ -0,0 +1,519 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/drivers/MCDriver.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 22678 bytes +""" +Drivers +Created on 2021/4/13 +@author: Lius +""" +import re, math, time, serial, libevent, mcprotocol +from common.Logger import logger +from collections import namedtuple +from common.DataType import String, Float +from common.Protocol import MC1C, MC3C, MC4C, MC3E, MCPS + +class MCDriver: + + def __init__(self, device_param, cont_acquisition, libevent_base, coms): + self.device_param = device_param + self.acquisition_mode = cont_acquisition + self.max_continuous_number = 50 + self.sample_period = device_param["samplePeriod"] + self.expired = device_param["expired"] + self._publish_measures_data = None + self.ctrl_readable = 0 + self.variables = dict() + self.polling_maps = list() + self.coms = coms + self.device = self.get_device_instance(self.device_param) + self.libevent_base = libevent_base + self.read_evt_timer = libevent.Timer((self.libevent_base), (self.read_timer), userdata=None) + + def _gen_serial_model_ins(self, com, expired): + from mobiuspi_lib.serial import Serial as IG_serial + com_args = dict() + for c in self.coms: + if c["name"] == com: + com_args = c + break + + if not com_args: + raise ValueError("Unknown serial port.") + if "serial_ins" not in com_args.keys(): + ig_serial = IG_serial() + port = ig_serial.get_serial_path(com_args["name"]) + logger.info("serial: %s %s %s-%s-%s" % 
(port, com_args["baud"], com_args["bits"], + com_args["parityChk"].upper(), com_args["stopbits"])) + com_args["serial_ins"] = serial.Serial(port=port, baudrate=(com_args["baud"]), + bytesize=(com_args["bits"]), + parity=(com_args["parityChk"].upper()), + stopbits=(com_args["stopbits"]), + xonxoff=0) + sm = mcprotocol.SerialModel(com_args["serial_ins"]) + sm.set_timeout(expired / 1000) + sm.open() + return sm + + def _gen_frmt_ins(self, format, frame, sum_check): + is_sum_check = True if sum_check == 1 else False + if format == 1: + logger.info("Format 4") + return mcprotocol.PkgFormat4(frame_id=frame, is_sum_check=is_sum_check) + logger.info("Format 1") + return mcprotocol.PkgFormat(frame_id=frame, is_sum_check=is_sum_check) + + def get_device_instance(self, dev): + try: + if "args" in dev: + args = dev["args"] + else: + args = dict() + if "continuousAcquisition" in args: + self.acquisition_mode = args["continuousAcquisition"] + elif "maxContinuousNumber" in args: + self.max_continuous_number = args["maxContinuousNumber"] + if "stationNo" in args: + station_no = int(args["stationNo"], 16) + else: + station_no = 0 + if dev["protocol"] == MC3C: + logger.info("3C") + frmt = self._gen_frmt_ins(args["format"], mcprotocol.THREE_C_FRAME, args["sumCheck"]) + prtl = mcprotocol.Type3C(frmt, self._gen_serial_model_ins(dev["endpoint"], dev["expired"]), station_no) + else: + if dev["protocol"] == MC1C: + logger.info("1C") + frmt = self._gen_frmt_ins(args["format"], mcprotocol.ONE_C_FRAME, args["sumCheck"]) + prtl = mcprotocol.Type1C(frmt, self._gen_serial_model_ins(dev["endpoint"], dev["expired"]), station_no) + else: + if dev["protocol"] == MC4C: + logger.info("4C") + frmt = self._gen_frmt_ins(args["format"], mcprotocol.FOUR_C_FRAME, args["sumCheck"]) + prtl = mcprotocol.Type4C(frmt, self._gen_serial_model_ins(dev["endpoint"], dev["expired"]), station_no) + else: + if dev["protocol"] == MCPS: + logger.info("MCPS") + prtl = 
mcprotocol.ComFx(self._gen_serial_model_ins(dev["endpoint"], dev["expired"])) + else: + if dev["protocol"] == MC3E: + logger.info("3E") + endpoint = dev["endpoint"].split(":") + dev["ip"] = endpoint[0] + dev["port"] = int(endpoint[1]) + sm = mcprotocol.SockteModel(dev["ip"], dev["port"]) + sm.connect() + sm.set_timeout(dev["expired"] / 1000) + prtl = mcprotocol.Type3E(sm, station_no) + device = prtl + logger.info("### create OK. ###") + except Exception as e: + try: + device = None + logger.error("### create error. %s ###" % e) + finally: + e = None + del e + + return device + + @property + def publish_measures_data(self): + return self._publish_measures_data + + @publish_measures_data.setter + def publish_measures_data(self, func): + self._publish_measures_data = func + + def read_timer(self, evt, userdata): + try: + logger.info("### Regularly polling: %s | ctrl name: %s ###" % ( + self.sample_period, self.device_param["name"])) + timestamp = time.time() + values = dict({'timestamp':int(timestamp), 'name':self.device_param["name"], 'version':self.device_param["version"], + 'health':(self.get_device_connect_status)(), + 'measures':list()}) + try: + start_time = time.time() + values["measures"] = self.read_variables_value(int(timestamp)) + end_time = time.time() + logger.debug("get data time:%s (device:%s)" % ( + end_time - start_time, self.device_param["name"])) + except Exception as e: + try: + logger.warn("### Collect error:%s (ctrl name:%s) ###" % ( + e, self.device_param["name"])) + finally: + e = None + del e + + if self._publish_measures_data: + values["health"] = self.get_device_connect_status() + self._publish_measures_data(values) + else: + use_time = time.time() - timestamp + if use_time > self.sample_period or abs(use_time - self.sample_period) < 0.2: + self.read_evt_timer.add(float(use_time + 0.2)) + else: + self.read_evt_timer.add(float(self.sample_period)) + except Exception as e: + try: + logger.warn("### Timer error. 
%s ###" % e) + self.read_evt_timer.add(float(self.sample_period)) + finally: + e = None + del e + + logger.info("### Collect End ###\n") + + def get_device_connect_status(self): + isconnect = 0 + if self.device: + try: + isconnect = self.ctrl_readable + except Exception: + pass + + return int(isconnect) + + def disconnect(self): + try: + if self.device: + self.device.channel.close() + return True + except Exception as e: + try: + logger.error("### Disconnect failed. %s ###" % e) + return False + finally: + e = None + del e + + def _reconnect(self): + try: + self.disconnect() + self.device = self.get_device_instance(self.device_param) + if self.device: + return True + except Exception as e: + try: + logger.error("### Reconnect error. %s ###" % e) + finally: + e = None + del e + + self.device = None + return False + + def __get_type_length__Parse error at or near `LOAD_CONST' instruction at offset 328 + + def __parsing_var__(self, var): + if "unit" in var: + del var["unit"] + if "desc" in var: + del var["desc"] + if "sendType" in var: + del var["sendType"] + var["decimal"] = int(var["decimal"]) if ("decimal" in var and var["decimal"]) else 2 + var["len"] = int(var["len"]) if re.match("string", var["dataType"], re.M | re.I) else 1 + var["len"] = self.__get_type_length__(var["dataType"]) * var["len"] + if "C(16bit)" in var["addr"]: + var["register_type"] = "CN" + str_register_addr = var["addr"][8[:None]] + var["addr"] = "CN" + str_register_addr + if "C(32bit)" in var["addr"]: + var["register_type"] = "LCN" + str_register_addr = var["addr"][8[:None]] + var["addr"] = "LCN" + str_register_addr + else: + var["register_type"] = re.search("\\D+", var["addr"]).group(0) + str_register_addr = re.search("\\d.*", var["addr"]).group(0) + if var["register_type"] == "T": + var["register_type"] = "TN" + var["addr"] = "TN" + str_register_addr + var["register_addr"] = int(str_register_addr) + return (namedtuple("mc_var", var.keys()))(*var.values()) + + def __new_polling_dict__(self, 
addr, start_addr, end_addr, dataType, register_type, length, name): + return {'addr':addr, 'start_addr':start_addr, 'end_addr':end_addr, 'dataType':dataType, 'register_type':register_type, + 'len':length, 'member':set((name,))} + + def refresh_batch_dict(self): + type_addr_maps = dict() + addr_tags_maps = dict() + for tag in self.variables.values(): + if tag.register_type not in type_addr_maps: + type_addr_maps[tag.register_type] = list() + type_addr_maps[tag.register_type].append(tag.register_addr) + if tag.addr not in addr_tags_maps: + addr_tags_maps[tag.addr] = list() + addr_tags_maps[tag.addr].append(tag) + + for r_type, s_addrs in type_addr_maps.items(): + s_addrs.sort() + for addr in s_addrs: + try: + self.add_var_to_polling(addr_tags_maps["%s%s" % (r_type, addr)]) + except Exception as e: + try: + logger.error("Refresh polling dict error. %s" % e) + finally: + e = None + del e + + def update_variables(self, tags): + """ + tags=[var1, var2, var3] + """ + logger.debug("Update measure (tags:%s)" % tags) + for tag in tags: + tag_name = tag["name"] + try: + _variable = self.__parsing_var__(tag) + self.variables[tag_name] = _variable + if self.acquisition_mode == 0: + return + self.refresh_batch_dict() + except Exception as e: + try: + logger.error("Update measure tag:%s (device:%s) error:%s" % ( + tag["name"], self.device_param["name"], e)) + finally: + e = None + del e + + def add_var_to_polling(self, tags): + for mc_var in tags: + start_addr = mc_var.register_addr + end_addr = mc_var.register_addr + mc_var.len - 1 + is_exist = False + for pm in self.polling_maps: + if re.match("string", mc_var.dataType, re.M | re.I): + break + if re.match("string", pm["dataType"], re.M | re.I): + continue + if pm["len"] + mc_var.len > self.max_continuous_number: + continue + if mc_var.register_type != pm["register_type"]: + continue + cmp_min_addr = pm["start_addr"] - mc_var.len + cmp_max_addr = pm["end_addr"] + 1 + if cmp_min_addr <= mc_var.register_addr <= cmp_max_addr: + 
is_exist = True + if start_addr <= pm["start_addr"]: + pm["len"] += pm["start_addr"] - start_addr + pm["start_addr"] = start_addr + if end_addr >= pm["end_addr"]: + pm["len"] += end_addr - pm["end_addr"] + pm["end_addr"] = end_addr + pm["member"].add(mc_var.name) + break + + if not is_exist: + self.polling_maps.append(self.__new_polling_dict__(mc_var.addr, start_addr, end_addr, mc_var.dataType, mc_var.register_type, mc_var.len, mc_var.name)) + + def del_all_variable(self): + self.variables.clear() + self.polling_maps = list() + + def __bytes_to_raw_data__(self, var, data): + if re.match("bit", var.dataType, re.M | re.I): + return data[0] + if re.match("string", var.dataType, re.M | re.I): + valid_index = 65535 + if 0 in data: + valid_index = data.index(0) + data = bytearray(b'') if valid_index == 0 else data[0[:valid_index]] + data = data.decode("utf-8") + else: + if re.match("dword", var.dataType, re.M | re.I): + return data[0] << 16 | data[1] + else: + if re.match("word", var.dataType, re.M | re.I): + return data[0] + elif re.match("dint", var.dataType, re.M | re.I): + s = (data[0] & 32767) << 16 & 4294901760L | data[1] + if data[0] & 32768 == 0: + return s + return (s | 2147483648L) - 4294967296L + if re.match("int", var.dataType, re.M | re.I): + if data[0] & 32768 == 0: + return data[0] + return data[0] - 65536 + if re.match("float", var.dataType, re.M | re.I): + dword = data[0] << 16 | data[1] + f = (-1) ** ((dword & 2147483648L) >> 31) * 2 ** (((abs(dword) & 2139095040) >> 23) - 127) * (1 + (abs(dword) & 8388607) * 1.0 / 8388608) + res = round(f, var.decimal) + if math.isnan(float(res)): + res = "NaN" + else: + res = str(round(res, var.decimal)) + return res + if re.match("sint", var.dataType, re.M | re.I): + h_byte = (data[0] & 65280) >> 8 + l_byte = data[0] & 255 + h_sint = h_byte if h_byte & 128 == 0 else (h_byte | 128) - 255 + l_sint = l_byte if l_byte & 128 == 0 else (l_byte | 128) - 255 + return [h_sint, l_sint] + if re.match("byte", var.dataType, re.M 
| re.I): + return [ + (data[0] & 65280) >> 8, data[0] & 255] + raise ValueError("Invalid data type.") + return data + + def __execute_read__(self, tag): + if not self.device: + self._reconnect() + if not self.device: + raise ValueError("Device %s is not accessible." % self.device_param["name"]) + elif re.match("bit", tag["dataType"], re.I): + data, error = self.device.batchread_bitunits(tag["addr"], tag["len"]) + else: + data, error = self.device.batchread_wordunits(tag["addr"], tag["len"]) + return ( + data, error) + + def to_str_bytes(self, bytes_data): + return "".join(map((lambda x: ("\x00x" if len(hex(x)) >= 4 else "\x00x0") + hex(x)[2[:None]]), bytes_data)) + + def convert_data(self, value, dtype): + if dtype == String: + if not isinstance(value, str): + return str(value) + else: + return value + if dtype == Float: + if not isinstance(value, float): + return "0.0" + return str(value) + return isinstance(value, int) or 0 + return value + + def read_single_measure(self, tag_name, device_name, timestamp): + tag = dict(self.variables[tag_name]._asdict()) + try: + logger.debug("Execute(MC) -> addr:%s len:%s device:%s" % (tag["addr"], tag["len"], device_name)) + data, err = self.__execute_read__(tag) + if err: + raise ValueError(err) + raw_data = self.__bytes_to_raw_data__(self.variables[tag_name], data) + self.ctrl_readable = 1 + return {'rawbytes':"", 'value':(self.convert_data)(raw_data, tag["dataType"]), 'health':1, + 'timestamp':timestamp, 'name':tag_name} + except Exception as e: + try: + logger.error("Read error [MC] @ %s (device:%s tag:%s)" % (e, device_name, tag["name"])) + finally: + e = None + del e + + return {'rawbytes':"", + 'value':(self.convert_data)("", tag["dataType"]), 'health':0, + 'timestamp':timestamp, 'name':tag_name} + + def read_variables_value(self, timestamp): + measures = list() + self.ctrl_readable = 0 + device_name = self.device_param["name"] + if self.acquisition_mode == 0: + for tag_name in self.variables.keys(): + 
measures.append(self.read_single_measure(tag_name, device_name, timestamp)) + + return measures + for tag in self.polling_maps: + try: + logger.debug("Execute(MC)s -> addr:%s len:%s device:%s" % (tag["addr"], tag["len"], device_name)) + data, err = self.__execute_read__(tag) + if err: + raise ValueError(err) + for tag_name in tag["member"]: + var = self.variables[tag_name] + var_addr_offset = var.register_addr - tag["start_addr"] + if re.match("string", var.dataType, re.M | re.I): + bytes_data = data + else: + bytes_data = data[var_addr_offset[:var_addr_offset + var.len]] + raw_data = self.__bytes_to_raw_data__(var, bytes_data) + self.ctrl_readable = 1 + measures.append({'rawbytes':"", 'value':(self.convert_data)(raw_data, var.dataType), 'health':1, + 'timestamp':timestamp, 'name':tag_name}) + + continue + except Exception as e: + try: + logger.debug("Batch read error [MC] @ %s" % e.__str__()) + if "No response data" not in e.__str__(): + for tag_name in tag["member"]: + measures.append(self.read_single_measure(tag_name, device_name, timestamp)) + + finally: + e = None + del e + + for tag_name in tag["member"]: + var = self.variables[tag_name] + measures.append({'rawbytes':"", 'value':(self.convert_data)("", var.dataType), 'health':0, + 'timestamp':timestamp, 'name':tag_name}) + + return measures + + def write_variables_value(self, msg): + err_code = 0 + params = "" + if msg["name"] in self.variables[msg["name"]]: + tag = self.variables[msg["name"]] + try: + if "rw" in tag.readWrite: + logger.info("Find Name: %s| Write value: %s| Addr:%s" % ( + tag.name, msg["value"], tag.addr)) + if not (self.device and self.get_device_connect_status()): + self._reconnect() + if self.device: + if not self.get_device_connect_status(): + err_code = 1 + params = self.device_param["name"] + raise ValueError("Device %s is not accessible." 
% self.device_param["name"]) + elif re.match("bit", tag.dataType, re.M | re.I): + _, error = self.device.batchwrite_bitunits(tag.addr, [int(msg["value"])]) + else: + if re.match("string", tag.dataType, re.M | re.I): + str_value = list() + for v in [msg["value"][i[:i + 2]] for i in range(0, len(msg["value"]), 2)]: + h_byte = ord(v[0]) + l_byte = 0 if len(v) == 1 else ord(v[1]) + str_value.append(h_byte << 8 & 65280 | l_byte) + + _, error = self.device.batchwrite_wordunits(tag.addr, str_value) + else: + if re.match("float", tag.dataType, re.M | re.I): + msg["value"] = round(float(msg["value"]), tag["decimal"] if tag["decimal"] else 2) + _, error = self.device.batchwrite_wordunits(tag.addr, [msg["value"]]) + else: + msg["value"] = int(msg["value"]) + _, error = self.device.batchwrite_wordunits(tag.addr, [msg["value"]]) + if error: + return ( + 1, error) + return ( + err_code, params, None) + err_code = 1 + params = tag.name + logger.warn("The measure %s does not support write operations." % tag.name) + except Exception as e: + try: + err_code = 1 + params = e.__str__() + logger.error("[MC] Write (%s) error : %s" % (tag.name, params)) + finally: + e = None + del e + + else: + err_code = 1 + params = "Measure does not exist." 
+ logger.error("Measure does not exist.") + return ( + err_code, params, None) \ No newline at end of file diff --git a/APPS_UNCOMPILED/src/drivers/OpcDaDriver.py b/APPS_UNCOMPILED/src/drivers/OpcDaDriver.py new file mode 100644 index 0000000..5623b5b --- /dev/null +++ b/APPS_UNCOMPILED/src/drivers/OpcDaDriver.py @@ -0,0 +1,711 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/drivers/OpcDaDriver.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 29827 bytes +""" +Drivers +Created on 2023/4/15 +@author: LLC +""" +import re, time, socket, libevent, OpenOPC +from common.Logger import logger +from collections import namedtuple +from drivers.Drivers import POLLING_SECOND, POLLING_MILLISECOND, POLLING_SECOND2 + +class OpcDaDriver: + + def __init__(self, device_param, cont_acquisition, libevent_base, debug_log_callback): + self.controller_health = 0 + self.controller_polling1_status = 0 + self.controller_polling2_status = 0 + self.device_param = device_param + self.acquisition_mode = cont_acquisition + self.sample_period = device_param["samplePeriod"] + self.first_scan1_finish = False + self.scan1_is_running = False + self.last_scan1_time = self.get_system_uptime_in_s() + self.expired = device_param["expired"] + self.enable_msec_sample = device_param["args"]["enableMsecSample"] + if self.enable_msec_sample: + self.msec_sample_period = device_param["args"]["msecSamplePeriod"] + self.msec_package = device_param["args"]["msecPackage"] + self.enable_cust_sample = device_param["enablepollCycle"] + if self.enable_cust_sample: + self.sample_period2 = device_param["samplePeriod2"] + self.first_scan2_finish = False + self.scan2_is_running = False + self.last_scan2_time = self.get_system_uptime_in_s() + self._publish_measures_data = None + self.max_continuous_number = 2 + 
self.variables = dict() + self.variables_value = dict() + self.polling_maps = list() + self.cust_polling_maps = list() + self.msec_polling_maps = list() + self.controller_jsonpack = list() + self.controller_jsonpack_number = 0 + self.debug_log_callback = debug_log_callback + self.device = self.get_device_instance(self.device_param) + self.libevent_base = libevent_base + self.read_evt_timer = libevent.Timer((self.libevent_base), (self.read_timer), userdata=None) + if self.enable_msec_sample: + self.msec_read_evt_timer = libevent.Timer((self.libevent_base), (self.msec_read_timer), userdata=None) + if self.enable_cust_sample: + self.cust_read_evt_timer = libevent.Timer((self.libevent_base), (self.cust_read_timer), userdata=None) + + def get_system_uptime_in_s(self): + return time.clock_gettime(time.CLOCK_MONOTONIC) + + def get_device_instance(self, dev): + if "connectTimeOut" not in dev: + dev["connectTimeOut"] = 10000 + elif "continuousAcquisition" in dev["args"]: + self.acquisition_mode = dev["args"]["continuousAcquisition"] + if "maxContinuousNumber" in dev["args"]: + self.max_continuous_number = dev["args"]["maxContinuousNumber"] + logger.info("%s serverURL:%s" % (dev["protocol"], dev["endpoint"])) + ip = dev["endpoint"].split(":")[0] + svrname = dev["endpoint"].split(":")[1] + try: + device = OpenOPC.open_client(ip, debug_log=(self.debug_log_callback)) + logger.info("The servers available for connection have: %s" % device.servers()) + device.connect(svrname, ip) + logger.info("%s connect success" % dev["protocol"]) + return device + except Exception as error: + try: + logger.error("%s connect failed(%s)" % (dev["protocol"], error)) + finally: + error = None + del error + + @property + def publish_measures_data(self): + return self._publish_measures_data + + @publish_measures_data.setter + def publish_measures_data(self, func): + self._publish_measures_data = func + + def msec_read_timer(self, evt, userdata): + try: + timestamp = time.time() + values = 
    def read_timer(self, evt, userdata):
        """libevent timer callback for polling group 1 (second-level poll).

        Reads every measure in self.polling_maps, publishes the snapshot
        through the registered callback, and updates the scan-1 bookkeeping
        flags when done.
        """
        try:
            self.scan1_is_running = True
            logger.debug("Regularly polling (1) controller %s(period:%s)" % (self.device_param["name"], self.sample_period))
            values = dict({'name':self.device_param["name"], 'version':self.device_param["version"], 'health':self.controller_health, 
             'timestamp':int(time.time()), 'measures':list()})
            try:
                start_time = self.get_system_uptime_in_s()
                values["measures"] = self.read_variables_value(self.polling_maps, POLLING_SECOND)
                end_time = self.get_system_uptime_in_s()
                logger.debug("read %s measures usetime:%sS" % (self.device_param["name"], end_time - start_time))
            except Exception as error:
                try:
                    logger.warn("read %s measures failed(%s)" % (self.device_param["name"], error))
                finally:
                    error = None
                    del error

            # Re-stamp after the (possibly slow) read so subscribers get the
            # publish-time timestamp, not the poll-start one.
            values["timestamp"] = int(time.time())
            if self._publish_measures_data:
                values["health"] = self.controller_health
                self._publish_measures_data(values, POLLING_SECOND, None)
            else:
                # NOTE(review): decompiled control flow — the timer is re-armed
                # only on this else branch (no publish callback registered).
                # Verify against the original bytecode that the re-arm was not
                # meant to run unconditionally after publishing.
                use_time = self.get_system_uptime_in_s() - start_time
                logger.debug("Regularly polling (1) all controller scan end, usetime: %s s" % use_time)
                if int(use_time) > self.sample_period:
                    self.read_evt_timer.add(float(use_time + 0.2))
                else:
                    # use_time + (period - use_time) == period; kept as written.
                    self.read_evt_timer.add(float(use_time + (self.sample_period - use_time)))
        except Exception as error:
            try:
                logger.warn("Regularly polling (1) controller failed(%s)" % error)
                # On unexpected failure, retry a full period later.
                self.read_evt_timer.add(float(self.sample_period))
            finally:
                error = None
                del error

        self.first_scan1_finish = True
        self.scan1_is_running = False
        self.last_scan1_time = self.get_system_uptime_in_s()
        logger.debug("Regularly polling (1) end")
= int(time.time()) + if self._publish_measures_data: + values["health"] = self.controller_health + self._publish_measures_data(values, POLLING_SECOND2, None) + else: + use_time = self.get_system_uptime_in_s() - start_time + logger.debug("Regularly polling (2) all controller scan end, usetime: %s s" % use_time) + if int(use_time) > self.sample_period2: + self.cust_read_evt_timer.add(float(use_time + 0.2)) + else: + self.cust_read_evt_timer.add(float(use_time + (self.sample_period2 - use_time))) + except Exception as error: + try: + logger.warn("Regularly polling (2) controller failed(%s)" % error) + self.cust_read_evt_timer.add(float(self.sample_period2)) + finally: + error = None + del error + + self.first_scan2_finish = True + self.scan2_is_running = False + self.last_scan2_time = self.get_system_uptime_in_s() + logger.debug("Regularly polling (2) end") + + def read_msec_variables_value(self): + measure_polling_times = 0 + controller_health = 0 + if not self.controller_health: + self.device = None + measures = dict() + if not self.device: + if self._reconnect() is False: + self.controller_health = 0 + for tags in self.msec_polling_maps: + for tag in tags["member"]: + measures[tag["name"]] = [ + 0, ""] + + return measures + device_name = self.device_param["name"] + for tags in self.msec_polling_maps: + if self.acquisition_mode == 0: + for tag in tags["member"]: + measure_polling_times += 1 + single_measure = self.read_single_msec_measure(tag, device_name) + if single_measure[0] == 1: + controller_health = 1 + measures[tag["name"]] = single_measure + + else: + try: + addrs = list() + for tag in tags["member"]: + addr = tag["addr"] + addrs.append(addr) + + data_info_list = self.__execute_read__(addrs) + for i in range(0, len(tags["member"])): + tag_name = tags["member"][i]["name"] + value = data_info_list[i][1] + if self.variables[tag_name].dataType in ('BYTE', 'SINT', 'WORD', + 'INT', 'DWORD', + 'DINT', 'BCD', 'BCD32', + 'ULONG', 'LONG') and 
self.variables[tag_name].enableBit: + raw_data = self.get_bit_val(int(value), self.variables[tag_name].bitIndex) + self.variables_value[tag_name] = self.__bytes_to_raw_data__(self.variables[tag_name], int(value)) + else: + raw_data = self.__bytes_to_raw_data__(self.variables[tag_name], value) + self.variables_value[tag_name] = raw_data + health = 0 + if data_info_list[i][2] == "Good": + health = 1 + measures[tags["member"][i]["name"]] = [ + health, raw_data] + + measure_polling_times += 1 + controller_health = 1 + continue + except socket.timeout: + self._reconnect() + raise TimeoutError("Read timeout.(socket)") + except Exception as e: + try: + logger.error("Read error [OPC_DA] @ %s (start:%s end:%s)" % (e.__str__(), + tags["start"], + tags["end"])) + if "rejected: unknown object" in e.__str__(): + self._reconnect() + finally: + e = None + del e + + for tag in tags["member"]: + measure_polling_times += 1 + measures[tag["name"]] = [0, ""] + + self.controller_health = controller_health + return measures + + def read_variables_value(self, polling_maps, pollingType): + measure_polling_times = 0 + if not self.controller_health: + self.device = None + measures = list() + if not self.device: + if self._reconnect() is False: + self.controller_health = 0 + for tags in polling_maps: + for tag in tags["member"]: + measures.append({'rawbytes':"", + 'value':"", 'health':0, 'timestamp':int(time.time()), 'timestampMsec':int(round(time.time(), 3) * 1000), + 'measureIndex':(self.variables[tag["name"]]).measureIndex, + 'name':tag["name"]}) + + return measures + device_name = self.device_param["name"] + for tags in polling_maps: + if self.acquisition_mode == 0: + for tag in tags["member"]: + measure_polling_times += 1 + single_measure = self.read_single_measure(tag, device_name) + measures.append(single_measure) + + else: + try: + addrs = list() + for tag in tags["member"]: + addr = tag["addr"] + addrs.append(addr) + + data_info_list = self.__execute_read__(addrs) + for i in 
range(0, len(tags["member"])): + tag_name = tags["member"][i]["name"] + value = data_info_list[i][1] + if self.variables[tag_name].dataType in ('BYTE', 'SINT', 'WORD', + 'INT', 'DWORD', + 'DINT', 'BCD', 'BCD32', + 'ULONG', 'LONG') and self.variables[tag_name].enableBit: + raw_data = self.get_bit_val(int(value), self.variables[tag_name].bitIndex) + self.variables_value[tag_name] = self.__bytes_to_raw_data__(self.variables[tag_name], int(value)) + else: + raw_data = self.__bytes_to_raw_data__(self.variables[tag_name], value) + self.variables_value[tag_name] = raw_data + health = 0 + if data_info_list[i][2] == "Good": + health = 1 + measures.append({'rawbytes':"", + 'value':raw_data, 'health':health, 'timestamp':int(time.time()), 'timestampMsec':int(round(time.time(), 3) * 1000), + 'measureIndex':(self.variables[tags["member"][i]["name"]]).measureIndex, + 'name':tags["member"][i]["name"]}) + + measure_polling_times += 1 + continue + except socket.timeout: + self._reconnect() + raise TimeoutError("Read timeout.(socket)") + except Exception as e: + try: + logger.error("Read error [OPC_DA] @ %s (start:%s end:%s)" % (e.__str__(), + tags["start"], + tags["end"])) + if "rejected: unknown object" in e.__str__(): + self._reconnect() + finally: + e = None + del e + + for tag in tags["member"]: + measure_polling_times += 1 + measures.append({'rawbytes':"", + 'value':"", 'health':0, 'timestamp':int(time.time()), 'timestampMsec':int(round(time.time(), 3) * 1000), + 'measureIndex':(self.variables[tag["name"]]).measureIndex, + 'name':tag["name"]}) + + if pollingType == POLLING_SECOND: + self.controller_polling1_status = 0 + else: + if pollingType == POLLING_SECOND2: + self.controller_polling2_status = 0 + else: + for measure in measures: + if measure["health"] == 1: + if pollingType == POLLING_SECOND: + self.controller_polling1_status = 1 + else: + if pollingType == POLLING_SECOND2: + self.controller_polling2_status = 1 + break + + if self.controller_polling1_status == 1 or 
    def read_single_measure(self, tag, device_name):
        """Read one measure over OPC-DA and return it as a measure dict.

        tag: {'name': ..., 'addr': ...} entry from a polling map.
        device_name: device name, used only for error logging.
        Returns a dict with value/health/timestamps/measureIndex; health is 1
        only when the server reports quality "Good".  On any error a dict
        with an empty value and health 0 is returned.
        """
        try:
            logger.debug("Execute(OPC-DA], read %s addr:%s", tag["name"], tag["addr"])
            addr = [tag["addr"]]
            data_info = self.__execute_read__(addr)
            tag_name = tag["name"]
            value = data_info[0][1]
            # Bit-addressed integer measures publish a single extracted bit but
            # cache the converted whole word in variables_value (later used by
            # set_bit_val when writing the bit back).
            if self.variables[tag_name].dataType in ('BYTE', 'SINT', 'WORD', 'INT',
             'DWORD', 'DINT', 'BCD', 'BCD32',
             'ULONG', 'LONG') and self.variables[tag_name].enableBit:
                raw_data = self.get_bit_val(int(value), self.variables[tag_name].bitIndex)
                self.variables_value[tag_name] = self.__bytes_to_raw_data__(self.variables[tag_name], int(value))
            else:
                raw_data = self.__bytes_to_raw_data__(self.variables[tag_name], value)
                self.variables_value[tag_name] = raw_data
            health = 0
            if data_info[0][2] == "Good":
                health = 1
            return {'rawbytes':"", 
             'value':raw_data, 'health':health, 
             'timestamp':int(time.time()), 'timestampMsec':int(round(time.time(), 3) * 1000), 
             'measureIndex':(self.variables[tag["name"]]).measureIndex, 
             'name':tag["name"]}
        except Exception as e:
            try:
                logger.error("Read error [OPC_DA] @ %s (device:%s tag:%s)" % (e, device_name, tag["name"]))
            finally:
                e = None
                del e

        # Fallback measure record: empty value, health 0, current timestamps.
        return {'rawbytes':"", 
         'value':"", 'health':0, 'timestamp':int(time.time()), 'timestampMsec':int(round(time.time(), 3) * 1000), 
         'measureIndex':(self.variables[tag["name"]]).measureIndex, 
         'name':tag["name"]}
self.variables[tag_name].bitIndex) + self.variables_value[tag_name] = self.__bytes_to_raw_data__(self.variables[tag_name], int(value)) + else: + raw_data = self.__bytes_to_raw_data__(self.variables[tag_name], value) + self.variables_value[tag_name] = raw_data + health = 0 + if data_info[0][2] == "Good": + health = 1 + return [ + health, raw_data] + except Exception as e: + try: + logger.error("Read msec error [OPC_DA] @ %s (device:%s tag:%s)" % (e, device_name, tag["name"])) + finally: + e = None + del e + + return [ + 0, ""] + + def get_bit_val(self, byte, index): + if byte & 1 << index: + return 1 + return 0 + + def set_bit_val(self, byte, index, val): + if val: + return byte | 1 << index + return byte & ~(1 << index) + + def __bytes_to_raw_data__(self, var, raw_data): + data = None + try: + if not isinstance(raw_data, str): + if isinstance(raw_data, int) or isinstance(raw_data, float): + if re.match("float", var.dataType, re.M | re.I): + data = float(raw_data) + else: + if re.match("double", var.dataType, re.M | re.I): + data = float(raw_data) + else: + if re.match("string", var.dataType, re.M | re.I): + data = str(raw_data) + else: + data = int(raw_data) + elif isinstance(raw_data, bool): + if re.match("float", var.dataType, re.M | re.I): + data = 1.0 if raw_data else 0.0 + else: + if re.match("double", var.dataType, re.M | re.I): + data = 1.0 if raw_data else 0.0 + else: + if re.match("string", var.dataType, re.M | re.I): + data = str(raw_data) + else: + data = 1 if raw_data else 0 + else: + if re.match("string", var.dataType, re.M | re.I): + data = str(raw_data) + else: + logger.error("data type is not string, but data(%s) is %s" % (type(raw_data), raw_data)) + except Exception as e: + try: + logger.error("error: %s, %s", e, e.__traceback__.tb_lineno) + finally: + e = None + del e + + return data + + def __execute_read__(self, addrs): + data_info = self.device.read(addrs) + return data_info + + def get_device_connect_status(self): + isconnect = 0 + if 
self.device: + try: + self.device.info() + isconnect = 1 + except Exception: + isconnect = 0 + + return int(isconnect) + + def disconnect(self): + try: + if self.device: + logger.info("Disconnect controller(%s)" % self.device_param["name"]) + self.device.close() + self.device = None + return True + except Exception as error: + try: + logger.error("Disconnect failed(%s)" % error) + return False + finally: + error = None + del error + + def _reconnect(self): + try: + self.disconnect() + self.device = self.get_device_instance(self.device_param) + if self.device: + return True + except Exception as e: + try: + logger.error("### Reconnect error. %s ###" % e) + finally: + e = None + del e + + return False + + def updata_polling_maps(self): + step = 1 + if self.acquisition_mode: + step = self.max_continuous_number + measure_list = list() + msec_measure_list = list() + cust_measure_list = list() + for tag_name, tag in self.variables.items(): + tmp = dict() + tmp["name"] = tag_name + tmp["addr"] = tag.addr + if self.enable_cust_sample: + if tag.pollCycle: + cust_measure_list.append(tmp) + else: + measure_list.append(tmp) + if self.enable_msec_sample and tag.msecSample: + msec_measure_list.append(tmp) + + for i in range(0, len(measure_list), step): + member = measure_list[i[:i + step]] + poll_block = {'start':i, 'end':i + step, 'member':member} + self.polling_maps.append(poll_block) + + for i in range(0, len(msec_measure_list), step): + member = msec_measure_list[i[:i + step]] + poll_block = {'start':i, 'end':i + step, 'member':member} + self.msec_polling_maps.append(poll_block) + + for i in range(0, len(cust_measure_list), step): + member = cust_measure_list[i[:i + step]] + poll_block = {'start':i, 'end':i + step, 'member':member} + self.cust_polling_maps.append(poll_block) + + def __parsing_var__(self, var): + if "unit" in var: + del var["unit"] + if "desc" in var: + del var["desc"] + if "uploadType" in var: + del var["uploadType"] + if "dataType" not in var: + 
var["dataType"] = None + return (namedtuple("opcda_drv", var.keys()))(*var.values()) + + def update_variables(self, tags): + """ + tags=[var1, var2, var3] + """ + for tag in tags: + tag_name = tag["name"] + try: + _variable = self.__parsing_var__(tag) + self.variables[tag_name] = _variable + self.variables_value[tag_name] = 0 + except Exception as e: + try: + logger.error("Update measure:%s (device:%s) error:%s" % ( + tag_name, self.device_param["name"], e)) + finally: + e = None + del e + + self.updata_polling_maps() + + def write_variables_value(self, msg): + err_code = 0 + params = "" + if msg["name"] in self.variables[msg["name"]]: + tag = self.variables[msg["name"]] + try: + if "rw" in tag.readWrite: + logger.debug("Find Name: %s| Write value: %s" % ( + tag.name, msg["value"])) + if self.device: + if self.controller_health == 0: + err_code = 1 + params = self.device_param["name"] + raise ValueError("Device %s is not accessible." % self.device_param["name"]) + if self.variables[msg["name"]].dataType in ('BYTE', 'SINT', + 'WORD', 'INT', + 'DWORD', 'DINT', + 'BCD', 'BCD32', + 'ULONG', 'LONG') and self.variables[msg["name"]].enableBit: + data = self.set_bit_val(self.variables_value[msg["name"]], self.variables[msg["name"]].bitIndex, msg["value"]) + else: + data = msg["value"] + if re.match("bool", tag.dataType, re.M | re.I) or re.match("bit", tag.dataType, re.M | re.I): + if isinstance(data, str): + if data in ('0', '1', 'True', 'False'): + data = eval(data) + elif "true" == data.lower(): + data = True + else: + if "false" == data.lower(): + data = False + else: + if re.match("string", tag.dataType, re.M | re.I): + if len(data) > tag.len: + data = data[None[:tag.len]] + elif isinstance(data, str): + data = eval(data) + value = None + res = self.device.write((tag.addr, data)) + if res != "Success": + logger.error("OPCDA write %s error: %s" % (msg["name"], res)) + err_code = 1 + params = res + else: + data_info = self.__execute_read__([tag.addr]) + value = 
self.__bytes_to_raw_data__(self.variables[tag.name], data_info[0][1]) + return (err_code, params, value) + err_code = 1 + params = tag.name + logger.warn("The measure %s does not support write operations." % tag.name) + except Exception as e: + try: + err_code = 1 + params = e.__str__() + logger.error("[OPC-DA] Write (%s) error : %s" % (tag.name, params)) + finally: + e = None + del e + + else: + err_code = 1 + params = "Measure does not exist." + logger.error("Measure does not exist.") + return ( + err_code, params, None) + + def recall_variables_value(self, msg): + measures = list() + try: + recall_polling_maps = list() + cust_measure_list = list() + for meaName in msg["measures"]: + if meaName in self.variables: + tag = self.variables[meaName] + tmp = dict() + tmp["name"] = meaName + tmp["addr"] = tag.addr + cust_measure_list.append(tmp) + else: + logger.error("Measure (%s) does not exist." % meaName) + + step = 1 + if self.acquisition_mode: + step = self.max_continuous_number + for i in range(0, len(cust_measure_list), step): + member = cust_measure_list[i[:i + step]] + poll_block = {'start':i, 'end':i + step, 'member':member} + recall_polling_maps.append(poll_block) + + measures = self.read_variables_value(recall_polling_maps, POLLING_SECOND2) + except Exception as e: + try: + params = e.__str__() + logger.error("[OPC-DA] Recall read (%s) error : %s" % (msg["measures"], params)) + finally: + e = None + del e + + return measures diff --git a/APPS_UNCOMPILED/src/drivers/OpcUaDriver.py b/APPS_UNCOMPILED/src/drivers/OpcUaDriver.py new file mode 100644 index 0000000..9856f23 --- /dev/null +++ b/APPS_UNCOMPILED/src/drivers/OpcUaDriver.py @@ -0,0 +1,527 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/drivers/OpcUaDriver.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source 
    def get_device_instance(self, dev):
        """Create and connect an OPC-UA client for device config *dev*.

        Returns the connected Client, or None when the connection fails.
        Side effect: may override self.acquisition_mode and
        self.max_continuous_number from dev["args"].
        """
        if "timeout" not in dev:
            dev["timeout"] = 1000
        # NOTE(review): decompiled control flow — continuousAcquisition /
        # maxContinuousNumber are only consulted on this elif branch, i.e.
        # when "timeout" IS already present.  Verify against the original
        # bytecode that these checks were not meant to be independent.
        elif "continuousAcquisition" in dev["args"]:
            self.acquisition_mode = dev["args"]["continuousAcquisition"]
            if "maxContinuousNumber" in dev["args"]:
                self.max_continuous_number = dev["args"]["maxContinuousNumber"]
        logger.info("%s serverURL:%s" % (dev["protocol"], dev["endpoint"]))
        ip = dev["endpoint"].split(":")[0]
        svrname = dev["endpoint"].split(":")[1]
        try:
            device = OpenOPC.open_client(ip, debug_log=(self.debug_log_callback))
            logger.info("The servers available for connection have: %s" % device.servers())
            device.connect(svrname, ip)
            logger.info("%s connect success" % dev["protocol"])
            return device
        except Exception as error:
            try:
                logger.error("%s connect failed(%s)" % (dev["protocol"], error))
            finally:
                error = None
                del error
publish_measures_data(self): + return self._publish_measures_data + + @publish_measures_data.setter + def publish_measures_data(self, func): + self._publish_measures_data = func + + def read_timer(self, evt, userdata): + try: + logger.debug("Regularly polling controller %s(period:%s)" % (self.device_param["name"], self.sample_period)) + timestamp = time.time() + values = dict({'name':self.device_param["name"], 'version':self.device_param["version"], 'health':self.controller_health, + 'timestamp':int(timestamp), 'measures':list()}) + try: + start_time = time.time() + values["measures"] = self.read_variables_value(int(timestamp)) + end_time = time.time() + logger.debug("read %s measures usetime:%sS" % (self.device_param["name"], end_time - start_time)) + except Exception as error: + try: + logger.warn("read %s measures failed(%s)" % (self.device_param["name"], error)) + finally: + error = None + del error + + if self._publish_measures_data: + values["health"] = self.controller_health + self._publish_measures_data(values) + use_time = time.time() - timestamp + self.read_evt_timer.add(float(use_time + self.sample_period)) + except Exception as error: + try: + logger.warn("Regularly polling controller failed(%s)" % error) + self.read_evt_timer.add(float(self.sample_period)) + finally: + error = None + del error + + logger.debug("Regularly polling end") + + def get_device_connect_status(self): + isconnect = 0 + if self.device: + isconnect = self.device.uaclient._uasocket._connection.is_open() + return int(isconnect) + + def disconnect(self): + try: + if self.device: + logger.info("Disconnect controller(%s)" % self.device_param["name"]) + self.device.disconnect() + self.device = None + return True + except Exception as error: + try: + logger.error("Disconnect failed(%s)" % error) + return False + finally: + error = None + del error + + def _reconnect(self): + try: + self.disconnect() + self.device = self.get_device_instance(self.device_param) + if self.device: + if 
self.polling_maps: + if self.get_device_connect_status(): + self.rebuild_read_command() + return True + except Exception as e: + try: + logger.error("### Reconnect error. %s ###" % e) + finally: + e = None + del e + + return False + + def __get_nodeid_and_idx__(self, nodeid): + if "[" in nodeid: + if nodeid.endswith("]"): + nodeid_list = nodeid.split("[") + nodeid = nodeid_list[0] + idx = int(nodeid_list[1][None[:-1]]) + return (nodeid, idx) + return ( + nodeid, None) + + def __parsing_var__(self, var): + if "unit" in var: + del var["unit"] + if "desc" in var: + del var["desc"] + if "uploadType" in var: + del var["uploadType"] + var["node_id"], var["value_idx"] = self.__get_nodeid_and_idx__(var["addr"]) + if "dataType" not in var: + var["dataType"] = None + return (namedtuple("opcua_var", var.keys()))(*var.values()) + + def update_variables(self, tags): + """ + tags=[var1, var2, var3] + """ + for tag in tags: + tag_name = tag["name"] + try: + _variable = self.__parsing_var__(tag) + self.variables[tag_name] = _variable + except Exception as e: + try: + logger.error("Update measure:%s (device:%s) error:%s" % ( + tag_name, self.device_param["name"], e)) + finally: + e = None + del e + + self.update_read_commands() + + def update_read_commands(self): + param = ua.ReadParameters() + tag_ids = [] + exist_continue_nodeid = [] + for tag_name, tag in self.variables.items(): + if tag.node_id not in exist_continue_nodeid: + rv = ua.ReadValueId() + rv.NodeId = ua.NodeId.from_string(tag.node_id) + rv.AttributeId = ua.AttributeIds.Value + param.NodesToRead.append(rv) + exist_continue_nodeid.clear() + exist_continue_nodeid.append(tag.node_id) + tag_ids.append(tag_name) + + self.polling_maps = {'read_command':{}, 'id':None, 'request':{}, 'param':param} + if self.device: + self.__from_nodeids_to_binrequst__(param) + self.polling_maps["id"] = tag_ids + + def rebuild_read_command(self): + param = self.polling_maps["param"] + self.__from_nodeids_to_binrequst__(param) + + def 
__from_nodeids_to_binrequst__(self, parameters): + node_lenght = len(parameters.NodesToRead) + for i in range(0, node_lenght, self.max_continuous_number): + NodesToRead = parameters.NodesToRead[i[:i + self.max_continuous_number]] + param = ua.ReadParameters() + param.NodesToRead = NodesToRead + request = ua.ReadRequest() + request.Parameters = param + with self.device.uaclient._uasocket._lock: + timeout = int(self.device_param["expired"]) + request.RequestHeader = self.device.uaclient._uasocket._create_request_header(timeout) + logger.debug("Requst sending: %s", request) + try: + binreq = struct_to_binary(request) + except Exception: + self.device.uaclient._uasocket._request_handle -= 1 + raise + + self.polling_maps["read_command"][str(i)] = binreq + self.polling_maps["request"][str(i)] = request + + self.polling_maps["param"] = parameters + + def __get_nodes_data__(self): + Results = [] + for idx in self.polling_maps["read_command"].keys(): + message_type = ua.MessageType.SecureMessage + callback = None + with self.device.uaclient._uasocket._lock: + self.device.uaclient._uasocket._request_id += 1 + _request_id = self.device.uaclient._uasocket._request_id + future = Future() + if callback: + future.add_done_callback(callback) + self.device.uaclient._uasocket._callbackmap[_request_id] = future + if self.device.uaclient._uasocket._connection.next_security_token.TokenId != 0: + self.device.uaclient._uasocket._connection.revolve_tokens() + param = self.polling_maps["read_command"][idx] + msg = self.device.uaclient._uasocket._connection.message_to_binary(param, message_type=message_type, + request_id=_request_id) + self.device.uaclient._uasocket._socket.socket.settimeout(5) + self.device.uaclient._uasocket._socket.write(msg) + self.device.uaclient._uasocket._socket.socket.settimeout(None) + request = self.polling_maps["request"][idx] + if not callback: + data = future.result(self.device.uaclient._uasocket.timeout) + self.device.uaclient._uasocket.check_answer(data, " in 
response to " + request.__class__.__name__) + response = struct_from_binary(ua.ReadResponse, data) + logger.debug(response) + response.ResponseHeader.ServiceResult.check() + for idx, rv in enumerate(self.polling_maps["request"][idx].Parameters.NodesToRead): + if rv.AttributeId == ua.AttributeIds.NodeClass: + dv = response.Results[idx] + if dv.StatusCode.is_good(): + dv.Value.Value = ua.NodeClass(dv.Value.Value) + elif rv.AttributeId == ua.AttributeIds.ValueRank: + dv = response.Results[idx] + if dv.StatusCode.is_good(): + if dv.Value.Value in (-3, -2, -1, 0, 1, 2, 3, 4): + dv.Value.Value = ua.ValueRank(dv.Value.Value) + + Results = Results + response.Results + + return Results + + def del_all_variable(self): + self.variables.clear() + self.polling_maps.clear() + + def to_str_value(self, value, len): + try: + if isinstance(value, bytes): + raise ValueError("Not support bytes data.") + return str(value)[None[:len]] + except Exception as e: + try: + logger.error("str() error: %s, raw data: %s", (e.__str__(), value)) + finally: + e = None + del e + + return "" + + def convert_data(self, value, dtype): + if dtype == String: + if not isinstance(value, str): + return str(value) + else: + return value + if dtype == Float: + if not isinstance(value, float): + return "0.0" + return str(value) + return isinstance(value, int) or 0 + return value + + def read_variables_value(self, timestamp): + measure_polling_times = 0 + controller_health = 0 + measures = list() + try: + if not self.polling_maps: + logger.warn("The measuring point is empty.") + return measures + if self.device and not self.get_device_connect_status() or self._reconnect(): + raise IOError("Negative device connection.") + else: + raw_data = None + try: + if self.acquisition_mode == 1: + raw_data = self.__get_nodes_data__() + except concurrent.futures._base.TimeoutError: + raise TimeoutError("Read timeout.(concurrent)") + except socket.timeout: + self._reconnect() + raise TimeoutError("Read timeout.(socket)") + 
except Exception as e: + try: + logger.warn("Batch reading produces an exception. (device:%s) warn:%s" % ( + self.device_param["name"], repr(e))) + raw_data = None + finally: + e = None + del e + + index = 0 + last_nodeid = None + for tag_name in self.polling_maps["id"]: + tag = self.variables[tag_name] + try: + if not raw_data: + logger.debug("Read single node: %s" % tag_name) + node_data = self.device.get_node(tag.node_id).get_data_value() + raw_data_value = node_data.Value.Value + raw_data_type = node_data.Value.VariantType + else: + raw_data_value = raw_data[index].Value.Value + raw_data_type = raw_data[index].Value.VariantType + if tag.node_id != last_nodeid: + last_nodeid = tag.node_id + index += 1 + except Exception as e: + try: + logger.warn("Read single node exception. (device:%s, measure:%s) warn:%s" % ( + self.device_param["name"], tag.name, repr(e))) + raw_data_value = "" + raw_data_type = ua.VariantType.Null + finally: + e = None + del e + + measure_polling_times += 1 + if raw_data_type.value != ua.VariantType.Null.value: + if isinstance(raw_data_value, list): + if tag.value_idx is not None: + raw_data_value = raw_data_value[tag.value_idx] + if hasattr(raw_data_value, "DataType"): + if raw_data_value.DataType == 0: + raw_data_value = raw_data_value.IntValue + elif raw_data_value.DataType == 1: + f = raw_data_value.FloatValue + raw_data_value = round(f, tag.decimal if tag.decimal else FLOAT_REPR_DEFAULT) + else: + raw_data_value = raw_data_value.StringValue + else: + if ua.VariantType.Float.value <= raw_data_type.value <= ua.VariantType.Double.value: + if math.isnan(float(raw_data_value)): + raw_data_value = "NaN" + else: + raw_data_value = round(raw_data_value, tag.decimal if tag.decimal else FLOAT_REPR_DEFAULT) + else: + if ua.VariantType.Boolean.value == raw_data_type.value: + raw_data_value = 1 if raw_data_value else 0 + else: + if ua.VariantType.SByte.value <= raw_data_type.value <= ua.VariantType.UInt32.value: + pass + else: + raw_data_value = 
    def write_variables_value(self, msg):
        """Write msg['value'] to the OPC-UA node backing measure msg['name'].

        Returns (err_code, params, value): err_code 0 on success, 1 on
        failure with a reason string in params; value is always None.
        """
        err_code = 0
        params = ""
        if msg["name"] in self.variables:
            var = self.variables[msg["name"]]
            try:
                if "rw" in var.readWrite:
                    if self.controller_health == 0:
                        err_code = 1
                        params = self.device_param["name"]
                        raise ValueError("Device %s is not accessible." % self.device_param["name"])
                    else:
                        logger.debug("Find Name: %s| Write value: %s| NodeId:%s" % (
                         var.name, msg["value"], var.node_id))
                        # Determine the node's variant type, preferring the live
                        # data value; fall back to the declared data type when
                        # the node cannot be read.
                        try:
                            data_value = self.device.get_node(var.node_id).get_data_value()
                            data_type = data_value.Value.VariantType
                            logger.debug("VariantType Identifier: %s" % data_type.value)
                            logger.debug("VariantType Name: %s" % data_type.name.upper())
                        except Exception:
                            data_type = self.device.get_node(var.node_id).get_data_type_as_variant_type()
                            logger.debug("get_data_type_as_variant_type. %s" % data_type)

                        data = self.__str_to_VariantType__(msg["value"], data_type)
                        if data_type == ua.VariantType.ExtensionObject:
                            # Structured (array) value: coerce the element by its
                            # embedded DataType tag, then write the whole array back.
                            array_element = data_value.Value.Value[var.value_idx]
                            if hasattr(array_element, "DataType"):
                                if array_element.DataType == 0:
                                    data = int(data)
                                else:
                                    if array_element.DataType == 1:
                                        data = round(float(data), var.decimal if var.decimal else FLOAT_REPR_DEFAULT)
                                    else:
                                        data = str(data)
                            # NOTE(review): the converted value is always assigned
                            # to .IntValue, even when DataType indicated a float or
                            # string element — verify against the original bytecode
                            # / server structure definition.
                            data_value.Value.Value[var.value_idx].IntValue = data
                            datavalue = ua.DataValue(data_value.Value.Value)
                            self.device.get_node(var.node_id).set_value(datavalue)
                        else:
                            datavalue = ua.DataValue(ua.Variant(data, data_type))
                            self.device.get_node(var.node_id).set_value(datavalue, data_type)
                        return (
                         err_code, params, None)
                else:
                    err_code = 1
                    params = "The measure %s does not support write operations." % var.name
                    logger.warn(params)
            except Exception as e:
                try:
                    err_code = 1
                    params = e.__str__()
                    logger.error("[OPCUA] Write (%s) error : %s" % (var.name, params))
                finally:
                    e = None
                    del e

        else:
            # Unknown measure name; params is left empty here (only the log
            # carries the reason) — kept as in the original.
            err_code = 1
            logger.error("Measure does not exist.")
        return (
         err_code, params, None)
+ if isinstance(data, str): + data = data.lower() + if "true" == data: + data = True + elif "false" == data: + data = False + else: + if data in ('0', '1'): + data = eval(data) + return bool(data) + if data_type.value == ua.VariantType.String.value: + return data + if data_type.value == ua.VariantType.DateTime.value: + return datetime.datetime.strptime(data, "%Y-%m-%d %H:%M:%S") + if data_type.value == ua.VariantType.ByteString.value: + return data.encode("utf-8") + return data diff --git a/APPS_UNCOMPILED/src/drivers/S7Driver.py b/APPS_UNCOMPILED/src/drivers/S7Driver.py new file mode 100644 index 0000000..7e363ef --- /dev/null +++ b/APPS_UNCOMPILED/src/drivers/S7Driver.py @@ -0,0 +1,844 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/drivers/S7Driver.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 45642 bytes +""" +Drivers +Created on 2020/12/28 +@author: Lius +""" +import re, os, math, time, snap7, struct, random, libevent +from common.Logger import logger +from collections import namedtuple +from common.InternalPath import EC_SYSTEM_INFO +from snap7.snap7exceptions import Snap7Exception +from drivers.Drivers import POLLING_SECOND, POLLING_MILLISECOND, POLLING_SECOND2 +AREA_CODE_MAP = {'I':(snap7.snap7types).S7AreaPE, + 'Q':(snap7.snap7types).S7AreaPA, + 'M':(snap7.snap7types).S7AreaMK, + 'DB':(snap7.snap7types).S7AreaDB, + 'V':(snap7.snap7types).S7AreaDB, + 'C':(snap7.snap7types).S7AreaCT, + 'T':(snap7.snap7types).S7AreaTM} + +class S7Driver: + + def __init__(self, device_param, cont_acquisition, libevent_base, debug_log_callback): + self.controller_health = 0 + self.controller_polling1_status = 0 + self.controller_polling2_status = 0 + self.device_param = device_param + self.acquisition_mode = cont_acquisition + self.sample_period = 
device_param["samplePeriod"] + self.first_scan1_finish = False + self.scan1_is_running = False + self.last_scan1_time = self.get_system_uptime_in_s() + self.expired = device_param["expired"] + self.enable_msec_sample = device_param["args"]["enableMsecSample"] + if self.enable_msec_sample: + self.msec_sample_period = device_param["args"]["msecSamplePeriod"] + self.msec_package = device_param["args"]["msecPackage"] + self.enable_cust_sample = device_param["enablepollCycle"] + if self.enable_cust_sample: + self.sample_period2 = device_param["samplePeriod2"] + self.last_scan2_time = self.get_system_uptime_in_s() + self.first_scan2_finish = False + self.scan2_is_running = False + self._publish_measures_data = None + self.variables = dict() + self.variables_value = dict() + self.polling_maps = list() + self.cust_polling_maps = list() + self.msec_polling_maps = list() + self.controller_jsonpack = list() + self.controller_jsonpack_number = 0 + self.debug_log_callback = debug_log_callback + self.device = self.get_device_instance(self.device_param) + self.libevent_base = libevent_base + self.read_evt_timer = libevent.Timer((self.libevent_base), (self.read_timer), userdata=None) + if self.enable_msec_sample: + self.msec_read_evt_timer = libevent.Timer((self.libevent_base), (self.msec_read_timer), userdata=None) + if self.enable_cust_sample: + self.cust_read_evt_timer = libevent.Timer((self.libevent_base), (self.cust_read_timer), userdata=None) + + def get_system_uptime_in_s(self): + return time.clock_gettime(time.CLOCK_MONOTONIC) + + def get_device_instance(self, dev): + s7 = snap7.client.Client() + if os.path.exists(EC_SYSTEM_INFO): + s7.set_debug_log(self.debug_log_callback) + else: + from mobiuspi_lib.systeminfo import SystemInfo + if "IG974" not in SystemInfo().get_model_name(): + s7.set_debug_log(self.debug_log_callback) + else: + try: + endpoint = dev["endpoint"].split(":") + dev["ip"] = endpoint[0] + dev["port"] = int(endpoint[1]) + args = dev["args"] + if 
"continuousAcquisition" in args: + self.acquisition_mode = args["continuousAcquisition"] + if "mode" in args: + if args["mode"] == "TSAP": + s7.set_connection_type(random.randint(3, 16)) + if isinstance(args["localTsap"], str): + if "." in args["localTsap"]: + local_tsap = args["localTsap"].split(".") + args["localTsap"] = eval("0x%s%s" % (local_tsap[0], local_tsap[1])) + else: + args["localTsap"] = eval("0x%s" % args["localTsap"]) + if isinstance(args["remoteTsap"], str): + if "." in args["remoteTsap"]: + remote_tsap = args["remoteTsap"].split(".") + args["remoteTsap"] = eval("0x%s%s" % (remote_tsap[0], remote_tsap[1])) + else: + args["remoteTsap"] = eval("0x%s" % args["remoteTsap"]) + logger.info("%s ip:%s, port:%s, localTsap:%s, remoteTsap:%s" % (dev["protocol"], + dev["ip"], dev["port"], args["localTsap"], args["remoteTsap"])) + s7.connect_tsap(dev["ip"], args["localTsap"], args["remoteTsap"], dev["port"]) + else: + logger.info("%s ip:%s, port:%s, rack:%s, slot:%s" % (dev["protocol"], + dev["ip"], dev["port"], args["rack"], args["slot"])) + s7.connect(dev["ip"], int(args["rack"]), int(args["slot"]), dev["port"]) + device = s7 + logger.info("%s connect success" % dev["protocol"]) + except Exception as error: + try: + device = None + logger.error("%s connect failed(%s)" % (dev["protocol"], error)) + finally: + error = None + del error + + return device + + @property + def publish_measures_data(self): + return self._publish_measures_data + + @publish_measures_data.setter + def publish_measures_data(self, func): + self._publish_measures_data = func + + def read_timer(self, evt, userdata): + try: + self.scan1_is_running = True + logger.debug("Regularly polling (1) controller %s(period:%s)" % (self.device_param["name"], self.sample_period)) + values = dict({'name':self.device_param["name"], 'version':self.device_param["version"], 'health':self.controller_health, + 'timestamp':int(time.time()), 'measures':list()}) + try: + start_time = self.get_system_uptime_in_s() + 
values["measures"] = self.read_variables_value(self.polling_maps, POLLING_SECOND) + end_time = self.get_system_uptime_in_s() + logger.debug("read %s measures usetime:%sS" % (self.device_param["name"], end_time - start_time)) + except Exception as error: + try: + logger.warn("read %s measures failed(%s)" % (self.device_param["name"], error)) + finally: + error = None + del error + + values["timestamp"] = int(time.time()) + if self._publish_measures_data: + values["health"] = self.controller_health + self._publish_measures_data(values, POLLING_SECOND, None) + else: + use_time = self.get_system_uptime_in_s() - start_time + logger.debug("Regularly polling (1) all controller scan end, usetime: %s s" % use_time) + if int(use_time) > self.sample_period: + self.read_evt_timer.add(float(use_time + 0.2)) + else: + self.read_evt_timer.add(float(use_time + (self.sample_period - use_time))) + except Exception as error: + try: + logger.warn("Regularly polling (1) controller failed(%s)" % error) + self.read_evt_timer.add(float(self.sample_period)) + finally: + error = None + del error + + self.first_scan1_finish = True + self.scan1_is_running = False + self.last_scan1_time = self.get_system_uptime_in_s() + logger.debug("Regularly polling (1) end") + + def cust_read_timer(self, evt, userdata): + try: + self.scan2_is_running = True + logger.debug("Regularly polling (2) controller %s(period:%s)" % (self.device_param["name"], self.sample_period2)) + values = dict({'name':self.device_param["name"], 'version':self.device_param["version"], 'health':self.controller_health, + 'timestamp':int(time.time()), 'measures':list()}) + try: + start_time = self.get_system_uptime_in_s() + values["measures"] = self.read_variables_value(self.cust_polling_maps, POLLING_SECOND2) + end_time = self.get_system_uptime_in_s() + logger.debug("read %s measures usetime:%sS" % (self.device_param["name"], end_time - start_time)) + except Exception as error: + try: + logger.warn("read %s measures failed(%s)" % 
(self.device_param["name"], error)) + finally: + error = None + del error + + values["timestamp"] = int(time.time()) + if self._publish_measures_data: + values["health"] = self.controller_health + self._publish_measures_data(values, POLLING_SECOND2, None) + else: + use_time = self.get_system_uptime_in_s() - start_time + logger.debug("Regularly polling (2) all controller scan end, usetime: %s s" % use_time) + if int(use_time) > self.sample_period2: + self.cust_read_evt_timer.add(float(use_time + 0.2)) + else: + self.cust_read_evt_timer.add(float(use_time + (self.sample_period2 - use_time))) + except Exception as error: + try: + logger.warn("Regularly polling (2) controller failed(%s)" % error) + self.cust_read_evt_timer.add(float(self.sample_period2)) + finally: + error = None + del error + + self.first_scan2_finish = True + self.scan2_is_running = False + self.last_scan2_time = self.get_system_uptime_in_s() + logger.debug("Regularly polling (2) end") + + def msec_read_timer(self, evt, userdata): + try: + timestamp = time.time() + values = dict({'controller':[self.device_param["name"], self.controller_health, round(timestamp, 6)], 'measure':dict()}) + try: + start_time = self.get_system_uptime_in_s() + values["measure"] = self.read_msec_variables_value() + end_time = self.get_system_uptime_in_s() + logger.debug("read %s measures usetime:%sS" % (self.device_param["name"], end_time - start_time)) + values["controller"][1] = self.controller_health + self.controller_jsonpack.append(values) + self.controller_jsonpack_number = self.controller_jsonpack_number + 1 + except Exception as error: + try: + logger.warn("read %s measures failed(%s)" % (self.device_param["name"], error)) + finally: + error = None + del error + + if self.controller_jsonpack_number >= self.msec_package: + if self._publish_measures_data: + self._publish_measures_data(self.controller_jsonpack, POLLING_MILLISECOND, self.device_param["name"]) + self.controller_jsonpack = list() + 
self.controller_jsonpack_number = 0 + else: + use_time = self.get_system_uptime_in_s() - start_time + if use_time >= self.msec_sample_period / 1000: + self.msec_read_evt_timer.add(0.01) + else: + self.msec_read_evt_timer.add(float(self.msec_sample_period / 1000 - use_time)) + except Exception as error: + try: + logger.warn("Regularly millisecond polling controller failed(%s)" % error) + self.msec_read_evt_timer.add(float(self.msec_sample_period / 1000)) + finally: + error = None + del error + + logger.debug("Regularly millisecond polling end") + + def get_device_connect_status(self): + isconnect = 0 + if self.device: + try: + cpu_status = self.device.get_cpu_state() + if cpu_status == "S7CpuStatusRun": + isconnect = 1 + except Exception: + pass + + return int(isconnect) + + def disconnect(self): + try: + if self.device: + logger.info("Disconnect controller(%s)" % self.device_param["name"]) + self.device.disconnect() + self.device.destroy() + return True + except Exception as error: + try: + logger.error("Disconnect failed(%s)" % error) + return False + finally: + error = None + del error + + def _reconnect(self): + try: + self.disconnect() + self.device = self.get_device_instance(self.device_param) + if self.device: + return True + except Exception as e: + try: + logger.error("### Disconnect error. 
%s ###" % e) + finally: + e = None + del e + + self.device = None + return False + + def __get_unpack_fmt__(self, dtype, number): + if "BYTE" in dtype: + return "%s" % "B" * number + if "SINT" in dtype: + return "%s" % "b" * number + if "DWORD" in dtype or "ULONG" in dtype: + return "%s" % "I" * number + if "WORD" in dtype or "USHORT" in dtype: + return "%s" % "H" * number + if "DINT" in dtype or "LONG" in dtype: + return "%s" % "l" * number + if "INT" in dtype or "SHORT" in dtype: + return "%s" % "h" * number + if "FLOAT" in dtype or "REAL" in dtype: + return "%s" % "f" * number + if "DOUBLE" in dtype: + return "%s" % "d" * number + if "STRING" in dtype: + return "%s" % "s" * number + if "BCD" in dtype: + return "%s" % "bb" * number + return "None" + + def __get_s7_type_length__Parse error at or near `LOAD_CONST' instruction at offset 404 + + def __parsing_var__(self, var): + if "unit" in var: + del var["unit"] + if "desc" in var: + del var["desc"] + if "sendType" in var: + del var["sendType"] + var["decimal"] = int(var["decimal"]) if ("decimal" in var and var["decimal"]) else 2 + addr = var["addr"] + if re.match("db", addr, re.M | re.I): + var["register_type"] = addr[None[:2]] + addr_list = addr[2[:None]].split(".") + var["dbnumber"] = int(addr_list[0]) + var["register_addr"] = int(addr_list[1]) + if len(addr_list) >= 3: + var["register_bit"] = int(addr_list[2]) + else: + var["register_type"] = addr[0] + var["dbnumber"] = 0 + addr_list = addr[1[:None]].split(".") + var["register_addr"] = int(addr_list[0]) + if len(addr_list) >= 2: + var["register_bit"] = int(addr_list[1]) + else: + var["area_code"] = AREA_CODE_MAP[var["register_type"]] + var["size"] = var["len"] if ("len" in var and var["len"]) else 2 + var["codeType"] = var["codeType"] if var["dataType"] == "STRING" else "UTF-8" + if var["codeType"] == "UTF-16": + var["codeType"] = "utf-16-le" + if var["codeType"] == "UTF-16-BIG": + var["codeType"] = "utf-16-be" + if "size" in var: + var["size"] = 
int(var["size"]) if re.match("string", var["dataType"], re.M | re.I) else 1 + var["number"] = var["size"] + else: + var["number"] = 1 + var["size"] = 1 + size = self.__get_s7_type_length__(var["dataType"]) * var["size"] + var["fmt"] = self.__get_unpack_fmt__(var["dataType"].upper(), var["size"]) + var["register_info"] = var["addr"] + var["size"] = size + if re.match("v", var["register_type"], re.M | re.I): + var["dbnumber"] = 1 + return (namedtuple("snap7_var", var.keys()))(*var.values()) + + def __new_polling_dict__(self, start_addr, end_addr, dataType, register_type, area_code, dbnumber, length, vid): + return {'start_addr':start_addr, 'end_addr':end_addr, 'dataType':dataType, 'register_type':register_type, + 'area_code':area_code, 'dbnumber':dbnumber, 'length':length, + 'member':set((vid,))} + + def find_polling_command_dict(self, var_name): + for pm in self.polling_maps: + if var_name in pm["member"]: + return pm + + return {} + + def refresh_polling_command_dict(self, member): + for tag_name in member: + try: + if self.enable_msec_sample: + if self.variables[tag_name].msecSample: + self.add_var_to_polling(self.variables[tag_name], self.msec_polling_maps) + if self.enable_cust_sample and self.variables[tag_name].pollCycle: + self.add_var_to_polling(self.variables[tag_name], self.cust_polling_maps) + else: + self.add_var_to_polling(self.variables[tag_name], self.polling_maps) + except Exception as e: + try: + logger.error("Refresh polling dict error. 
%s" % e) + finally: + e = None + del e + + def update_variablesParse error at or near `POP_BLOCK' instruction at offset 412 + + def add_var_to_polling(self, s7_var, plling_maps): + start_addr = s7_var.register_addr + end_addr = s7_var.register_addr + s7_var.size - 1 + is_exist = False + for pm in plling_maps: + if re.match("string", s7_var.dataType, re.M | re.I): + break + if re.match("string", pm["dataType"], re.M | re.I): + continue + if pm["length"] + s7_var.size > 2048: + continue + if s7_var.dbnumber != pm["dbnumber"]: + continue + if s7_var.register_type != pm["register_type"]: + continue + if pm["area_code"] != s7_var.area_code: + continue + cmp_min_addr = pm["start_addr"] - s7_var.size + cmp_max_addr = pm["end_addr"] + 1 + if cmp_min_addr <= s7_var.register_addr <= cmp_max_addr: + is_exist = True + if start_addr <= pm["start_addr"]: + pm["length"] += pm["start_addr"] - start_addr + pm["start_addr"] = start_addr + if end_addr >= pm["end_addr"]: + pm["length"] += end_addr - pm["end_addr"] + pm["end_addr"] = end_addr + pm["member"].add(s7_var.name) + break + + if not is_exist: + plling_maps.append(self.__new_polling_dict__(start_addr, end_addr, s7_var.dataType, s7_var.register_type, s7_var.area_code, s7_var.dbnumber, s7_var.size, s7_var.name)) + + def del_all_variable(self): + self.variables.clear() + self.polling_maps = list() + self.msec_polling_maps = list() + self.cust_polling_maps = list() + + def get_bit_val(self, byte, index): + if byte & 1 << index: + return 1 + return 0 + + def set_bit_val(self, byte, index, val): + if val: + return byte | 1 << index + return byte & ~(1 << index) + + def __bytes_to_raw_data__(self, var, data): + if re.match("bool", var.dataType, re.M | re.I): + data = 1 if snap7.util.get_bool(data, 0, var.register_bit) else 0 + else: + if re.match("bit", var.dataType, re.M | re.I): + data = 1 if snap7.util.get_bool(data, 0, var.register_bit) else 0 + else: + if re.match("string", var.dataType, re.M | re.I): + if re.match("db", 
var.register_type, re.M | re.I): + length = int(data[1]) + data = data[2[:None]] if len(data[2[:None]]) < length else data[2[:var.size + 2]] + else: + data = data[0[:var.size]] + data = data.decode(var.codeType, "ignore") + else: + data = struct.unpack("!" + var.fmt, data) + if re.match("bcd", var.dataType, re.M | re.I): + native_bcd_list = [(data[x], data[x + 1]) for x in range(0, len(data), 2)] + data = list() + for native_bcd in native_bcd_list: + bcd = "".join([str(((native_bcd[0] & 240) >> 4) % 10), str((native_bcd[0] & 15) % 10), + str(((native_bcd[1] & 240) >> 4) % 10), str((native_bcd[1] & 15) % 10)]) + data.append(int(bcd)) + + if len(data) == 1: + data = data[0] + if isinstance(data, float): + if math.isnan(float(data)): + data = "NaN" + else: + data = str(round(data, var.decimal)) + return data + + def __execute_read__(self, tag): + if re.match("db", tag["register_type"], re.I) and re.match("string", tag["dataType"], re.I): + data = self.device.read_area(tag["area_code"], tag["dbnumber"], tag["start_addr"], tag["length"] + 2) + else: + data = self.device.read_area(tag["area_code"], tag["dbnumber"], tag["start_addr"], tag["length"]) + return data + + def to_str_bytes(self, bytes_data): + return "".join(map((lambda x: ("\x00x" if len(hex(x)) >= 4 else "\x00x0") + hex(x)[2[:None]]), bytes_data)) + + def read_single_measure(self, tag_name, device_name): + tag = dict(self.variables[tag_name]._asdict()) + tag["start_addr"] = tag["register_addr"] + tag["length"] = tag["size"] + try: + logger.debug("Execute(ISO-on-TCP) -> type:%s dbnumber:%s addr:%s len:%s device:%s" % ( + tag["register_type"], tag["dbnumber"], tag["start_addr"], tag["length"], device_name)) + bytes_data = self.__execute_read__(tag) + if self.variables[tag_name].dataType in ('BYTE', 'SINT', 'WORD', 'INT', + 'DWORD', 'DINT', 'BCD', 'BCD32', + 'ULONG', 'LONG') and self.variables[tag_name].enableBit: + raw_data = self.get_bit_val(bytes_data[int(self.variables[tag_name].bitIndex / 8)], 
self.variables[tag_name].bitIndex) + self.variables_value[tag_name] = self.__bytes_to_raw_data__(self.variables[tag_name], bytes_data) + else: + raw_data = self.__bytes_to_raw_data__(self.variables[tag_name], bytes_data) + self.variables_value[tag_name] = raw_data + return {'rawbytes':"", + 'value':raw_data, 'health':1, + 'timestamp':int(time.time()), 'timestampMsec':int(round(time.time(), 3) * 1000), + 'measureIndex':(self.variables[tag_name]).measureIndex, + 'name':tag_name} + except Exception as e: + try: + logger.error("Read error [ISO-on-TCP] @ %s (device:%s tag:%s)" % (e, device_name, tag["name"])) + finally: + e = None + del e + + return {'rawbytes':"", + 'value':"", 'health':0, 'timestamp':int(time.time()), 'timestampMsec':int(round(time.time(), 3) * 1000), + 'measureIndex':(self.variables[tag_name]).measureIndex, + 'name':tag_name} + + def read_single_msec_measure(self, tag_name, device_name): + tag = dict(self.variables[tag_name]._asdict()) + tag["start_addr"] = tag["register_addr"] + tag["length"] = tag["size"] + try: + logger.debug("Execute(ISO-on-TCP) -> type:%s dbnumber:%s addr:%s len:%s device:%s" % ( + tag["register_type"], tag["dbnumber"], tag["start_addr"], tag["length"], device_name)) + bytes_data = self.__execute_read__(tag) + if self.variables[tag_name].dataType in ('BYTE', 'SINT', 'WORD', 'INT', + 'DWORD', 'DINT', 'BCD', 'BCD32', + 'ULONG', 'LONG') and self.variables[tag_name].enableBit: + raw_data = self.get_bit_val(bytes_data[int(self.variables[tag_name].bitIndex / 8)], self.variables[tag_name].bitIndex) + self.variables_value[tag_name] = self.__bytes_to_raw_data__(self.variables[tag_name], bytes_data) + else: + raw_data = self.__bytes_to_raw_data__(self.variables[tag_name], bytes_data) + self.variables_value[tag_name] = raw_data + return [ + 1, raw_data] + except Exception as e: + try: + logger.error("Read error [ISO-on-TCP] @ %s (device:%s tag:%s)" % (e, device_name, tag["name"])) + finally: + e = None + del e + + return [ + 0, ""] + + def 
read_variables_value(self, polling_maps, pollingType): + measure_polling_times = 0 + measures = list() + if not self.device: + if self._reconnect() is False: + self.controller_health = 0 + for tag in polling_maps: + for tag_name in tag["member"]: + measures.append({'rawbytes':"", + 'value':"", 'health':0, 'timestamp':int(time.time()), 'timestampMsec':int(round(time.time(), 3) * 1000), + 'measureIndex':(self.variables[tag_name]).measureIndex, + 'name':tag_name}) + + return measures + else: + device_name = self.device_param["name"] + for tag in polling_maps: + if self.acquisition_mode == 0: + for tag_name in tag["member"]: + measure_polling_times += 1 + single_measure = self.read_single_measure(tag_name, device_name) + if single_measure["health"] == 1: + if pollingType == POLLING_SECOND: + self.controller_polling1_status = 1 + else: + if pollingType == POLLING_SECOND2: + self.controller_polling2_status = 1 + measures.append(single_measure) + + else: + try: + logger.debug("Execute(ISO-on-TCP)s -> type:%s dbnumber:%s addr:%s len:%s device:%s" % ( + tag["register_type"], tag["dbnumber"], tag["start_addr"], tag["length"], device_name)) + data = self.__execute_read__(tag) + for tag_name in tag["member"]: + var = self.variables[tag_name] + var_addr_offset = var.register_addr - tag["start_addr"] + if re.match("string", var.dataType, re.M | re.I): + bytes_data = data + else: + bytes_data = data[var_addr_offset[:var_addr_offset + var.size]] + if self.variables[tag_name].dataType in ('BYTE', 'SINT', + 'WORD', 'INT', + 'DWORD', 'DINT', + 'BCD', 'BCD32', + 'ULONG', 'LONG') and self.variables[tag_name].enableBit: + raw_data = self.get_bit_val(bytes_data[int(self.variables[tag_name].bitIndex / 8)], self.variables[tag_name].bitIndex) + self.variables_value[tag_name] = self.__bytes_to_raw_data__(var, bytes_data) + else: + raw_data = self.__bytes_to_raw_data__(var, bytes_data) + self.variables_value[tag_name] = raw_data + measure_polling_times += 1 + if pollingType == POLLING_SECOND: 
+ self.controller_polling1_status = 1 + else: + if pollingType == POLLING_SECOND2: + self.controller_polling2_status = 1 + measures.append({'rawbytes':"", + 'value':raw_data, 'health':1, 'timestamp':int(time.time()), 'timestampMsec':int(round(time.time(), 3) * 1000), + 'measureIndex':var.measureIndex, + 'name':tag_name}) + + continue + except Snap7Exception as e: + try: + if "Address out of range" in e.__str__() and len(tag["member"]) > 1: + for tag_name in tag["member"]: + measure_polling_times += 1 + single_measure = self.read_single_measure(tag_name, device_name) + if single_measure["health"] == 1: + if pollingType == POLLING_SECOND: + self.controller_polling1_status = 1 + else: + if pollingType == POLLING_SECOND2: + self.controller_polling2_status = 1 + measures.append(single_measure) + + continue + else: + if "Item not available" in e.__str__(): + logger.debug("#### %s ####" % e.__str__()) + else: + self._reconnect() + finally: + e = None + del e + + except Exception as e: + try: + logger.error("Read error [ISO-on-TCP] @ %s (type:%s dbnumber:%s addr:%s len:%s)" % (e.__str__(), + tag["register_type"], + tag["dbnumber"], + tag["start_addr"], + tag["length"])) + finally: + e = None + del e + + for tag_name in tag["member"]: + measure_polling_times += 1 + measures.append({'rawbytes':"", + 'value':"", 'health':0, 'timestamp':int(time.time()), 'timestampMsec':int(round(time.time(), 3) * 1000), + 'measureIndex':(self.variables[tag_name]).measureIndex, + 'name':tag_name}) + + if self.controller_polling1_status == 1 or self.controller_polling2_status == 1: + self.controller_health = 1 + else: + self.controller_health = 0 + return measures + + def read_msec_variables_value(self): + measure_polling_times = 0 + controller_health = 0 + measures = dict() + if not self.device: + if self._reconnect() is False: + self.controller_health = 0 + for tag in self.msec_polling_maps: + for tag_name in tag["member"]: + measures[tag_name] = [ + 0, ""] + + return measures + device_name = 
self.device_param["name"] + for tag in self.msec_polling_maps: + if self.acquisition_mode == 0: + for tag_name in tag["member"]: + measure_polling_times += 1 + single_measure = self.read_single_msec_measure(tag_name, device_name) + if single_measure[0] == 1: + controller_health = 1 + measures[tag_name] = single_measure + + else: + try: + logger.debug("Execute(ISO-on-TCP)s -> type:%s dbnumber:%s addr:%s len:%s device:%s" % ( + tag["register_type"], tag["dbnumber"], tag["start_addr"], tag["length"], device_name)) + data = self.__execute_read__(tag) + for tag_name in tag["member"]: + var = self.variables[tag_name] + var_addr_offset = var.register_addr - tag["start_addr"] + if re.match("string", var.dataType, re.M | re.I): + bytes_data = data + else: + bytes_data = data[var_addr_offset[:var_addr_offset + var.size]] + if self.variables[tag_name].dataType in ('BYTE', 'SINT', 'WORD', + 'INT', 'DWORD', + 'DINT', 'BCD', 'BCD32', + 'ULONG', 'LONG') and self.variables[tag_name].enableBit: + raw_data = self.get_bit_val(bytes_data[int(self.variables[tag_name].bitIndex / 8)], self.variables[tag_name].bitIndex) + self.variables_value[tag_name] = self.__bytes_to_raw_data__(var, bytes_data) + else: + raw_data = self.__bytes_to_raw_data__(var, bytes_data) + self.variables_value[tag_name] = raw_data + measure_polling_times += 1 + controller_health = 1 + measures[tag_name] = [1, raw_data] + + continue + except Snap7Exception as e: + try: + if "Address out of range" in e.__str__() and len(tag["member"]) > 1: + for tag_name in tag["member"]: + measure_polling_times += 1 + single_measure = self.read_single_msec_measure(tag_name, device_name) + if single_measure[0] == 1: + controller_health = 1 + measures[tag_name] = single_measure + + continue + else: + if "Item not available" in e.__str__(): + logger.debug("#### %s ####" % e.__str__()) + else: + self._reconnect() + finally: + e = None + del e + + except Exception as e: + try: + logger.error("Read error [ISO-on-TCP] @ %s (type:%s 
dbnumber:%s addr:%s len:%s)" % (e.__str__(), + tag["register_type"], + tag["dbnumber"], + tag["start_addr"], + tag["length"])) + finally: + e = None + del e + + for tag_name in tag["member"]: + measure_polling_times += 1 + measures[tag_name] = [0, ""] + + self.controller_health = controller_health + return measures + + def write_variables_value(self, msg): + err_code = 0 + params = "" + if msg["name"] in self.variables: + tag = self.variables[msg["name"]] + try: + if "rw" in tag.readWrite: + logger.debug("Find Name: %s| Write value: %s| Register:%s" % ( + tag.name, msg["value"], tag.register_info)) + if not self.device or self.controller_health == 0: + err_code = 1 + params = self.device_param["name"] + raise ValueError("Device %s is not accessible." % self.device_param["name"]) + if self.variables[msg["name"]].dataType in ('BYTE', 'SINT', 'WORD', + 'INT', 'DWORD', 'DINT', + 'BCD', 'BCD32', 'ULONG', + 'LONG') and self.variables[msg["name"]].enableBit: + data = self.set_bit_val(self.variables_value[msg["name"]], self.variables[msg["name"]].bitIndex, msg["value"]) + else: + data = msg["value"] + if re.match("bool", tag.dataType, re.M | re.I) or re.match("bit", tag.dataType, re.M | re.I): + rdata = self.device.read_area(tag.area_code, tag.dbnumber, tag.register_addr, tag.size) + if isinstance(data, str): + if data in ('0', '1', 'True', 'False'): + data = eval(data) + else: + if "true" == data.lower(): + data = True + else: + if "false" == data.lower(): + data = False + snap7.util.set_bool(rdata, 0, tag.register_bit, data) + wdata = rdata + else: + if re.match("string", tag.dataType, re.M | re.I): + if len(data) > tag.size: + data = data[None[:tag.size]] + elif re.match("db", tag.register_type, re.M | re.I): + header = struct.pack("!B", 254) + length = struct.pack("!B", len(data)) + wdata = header + length + bytearray(data, tag.codeType) + bytearray("\x00", tag.codeType) + else: + wdata = bytearray(data, tag.codeType) + bytearray("\x00", tag.codeType) + else: + if 
re.match("bcd", tag.dataType, re.M | re.I): + data = str(data).zfill(4) + h8bit = (int(data[0]) & 15) << 4 | int(data[1]) + l8bit = (int(data[2]) & 15) << 4 | int(data[3]) + wdata = struct.pack("!B", h8bit) + struct.pack("!B", l8bit) + else: + if isinstance(data, str): + data = eval(data) + wdata = struct.pack("!" + tag.fmt, data) + self.device.write_area(tag.area_code, tag.dbnumber, tag.register_addr, wdata) + raw_data = self.device.read_area(tag.area_code, tag.dbnumber, tag.register_addr, tag.size) + value = self.__bytes_to_raw_data__(self.variables[msg["name"]], raw_data) + return ( + err_code, params, value) + err_code = 1 + params = tag.name + logger.warn("The measure %s does not support write operations." % tag.name) + except Exception as e: + try: + err_code = 1 + params = e.__str__() + logger.error("[ISO-on-TCP] Write (%s) error : %s" % (tag.name, params)) + finally: + e = None + del e + + else: + err_code = 1 + params = "Measure does not exist." + logger.error("Measure does not exist.") + return ( + err_code, params, None) + + def recall_variables_value(self, msg): + measures = list() + try: + recall_polling_maps = list() + for meaName in msg["measures"]: + if meaName in self.variables: + tag = self.variables[meaName] + self.add_var_to_polling(tag, recall_polling_maps) + else: + logger.error("Measure (%s) does not exist." 
% meaName) + + measures = self.read_variables_value(recall_polling_maps, POLLING_SECOND2) + except Exception as e: + try: + params = e.__str__() + logger.error("[ISO-on-TCP] Recall read (%s) error : %s" % (msg["measures"], params)) + finally: + e = None + del e + + return measures \ No newline at end of file diff --git a/APPS_UNCOMPILED/src/drivers/__init__.py b/APPS_UNCOMPILED/src/drivers/__init__.py new file mode 100644 index 0000000..88e3456 --- /dev/null +++ b/APPS_UNCOMPILED/src/drivers/__init__.py @@ -0,0 +1,15 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/drivers/__init__.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 155 bytes +""" +Drivers +Created on 2021/1/4 +@author: Lius +""" +import drivers.Drivers as Drivers +__all__ = [ + "Drivers"] diff --git a/APPS_UNCOMPILED/src/drvr.py b/APPS_UNCOMPILED/src/drvr.py new file mode 100644 index 0000000..b6a6c91 --- /dev/null +++ b/APPS_UNCOMPILED/src/drvr.py @@ -0,0 +1,39 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/drvr.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 1162 bytes +""" +Drivers +Created on 2021/1/6 +@author: Lius +""" +import argparse, sys, os +from drivers import Drivers +VAR_RUN_PYTHON_DS2_PATH = "/var/run/python/ds2" + +def write_pid_to_file(service_id): + if not os.path.exists(VAR_RUN_PYTHON_DS2_PATH): + os.mkdir(VAR_RUN_PYTHON_DS2_PATH) + pid_file_path = VAR_RUN_PYTHON_DS2_PATH + "/" + "drvr.py-" + str(service_id) + if os.path.exists(pid_file_path): + os.remove(pid_file_path) + with open(pid_file_path, "w") as f: + f.write(str(os.getpid())) + + +def main(argv=sys.argv): 
+ ap = argparse.ArgumentParser(description="Drivers Server") + ap.add_argument("-s", "--service_id", action="store", required=True, help="Service id.") + ap.add_argument("-c", "--config", action="store", required=True, help="Config file path.") + args = ap.parse_args(argv[1[:None]]) + write_pid_to_file(int(args.service_id)) + dev = Drivers(args.service_id, args.config) + cfg = dev.load_config() + dev.run_driver(cfg) + + +if __name__ == "__main__": + main() diff --git a/APPS_UNCOMPILED/src/main.py b/APPS_UNCOMPILED/src/main.py new file mode 100644 index 0000000..b216641 --- /dev/null +++ b/APPS_UNCOMPILED/src/main.py @@ -0,0 +1,42 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/main.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 843 bytes +import sys +import master.Master as Master +from common.Logger import logger +from common.Constant import AppInfo + +def main(argv=sys.argv): + logger.info("App device_supervisor start...") + app = Master(AppInfo.VENDOR, AppInfo.APP_NAME) + if app.mobiuspi.model_name[None[:2]].startswith("EC") or app.mobiuspi.model_name[None[:3]].startswith("HEC"): + try: + app.ec_verify_gateway() + except Exception as error: + try: + logger.error("%s" % error) + sys.exit(1) + finally: + error = None + del error + + app.init() + try: + app.run() + except Exception as err: + try: + logger.error("master exception error: %s" % err) + finally: + err = None + del err + + app.deinit() + logger.info("App device_supervisor exit...") + + +if __name__ == "__main__": + main() diff --git a/APPS_UNCOMPILED/src/master/AdaptConfig.py b/APPS_UNCOMPILED/src/master/AdaptConfig.py new file mode 100644 index 0000000..3fc6936 --- /dev/null +++ b/APPS_UNCOMPILED/src/master/AdaptConfig.py @@ -0,0 +1,910 @@ +# uncompyle6 version 3.9.2 +# Python bytecode 
version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/master/AdaptConfig.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 43988 bytes +import os, re, time, shutil +from common import InDB +from common.Logger import logger +DEVICE_TABLE = "device_tb" +GROUP_TABLE = "group_tb" +REMOTE_TABLE = "remote_tb" +SERIAL_TABLE = "serial_tb" +GLOBAL_TABLE = "global_tb" +PUB_TABLE = "pub_tb" +SUB_TABLE = "sub_tb" +WARN_TABLE = "warn_tb" +DEFAULT_TABLE = "default_tb" +CLOUD_IDENTIFIER_MAPS = { + 'Standard MQTT': '"GeneralMqtt"', + 'Aliyun IoT': '"AliyunLink"', + 'AWS IoT': '"AWSIoT"', + 'Azure IoT': '"AzureIoT"'} + +class AdaptConfig(object): + + def __init__(self, app_name, mobiuspi): + self.app_name = app_name + self.serial_number = mobiuspi.serial_number + self.filename = mobiuspi.cfg_path + self.db_dir = mobiuspi.app_config.get_app_db_base_path() + self.app_db_path = mobiuspi.app_config.get_app_db_path() + self.serial_tb = None + self.remote_tb = None + self.device_tb = None + self.group_tb = None + self.global_tb = None + self.default_tb = None + self.pub_tb = None + self.sub_tb = None + self.warn_tb = None + self.db = InDB.InDB(dbType=(InDB.DB_TYP_PRIVATE), dbRole=(InDB.DB_ROL_OWNER)) + self.global_device_dict = dict() + self.collect_groups = dict() + self.alarm_groups = dict() + self.found_db_dir = False + self.db_cfg = dict() + self.aliyun_product_key = "" + self.aliyun_device_name = "" + self.azure_iot_device_name = "" + self.cloud_type = "Standard MQTT" + self._open_db() + self._create_tables() + self._read_all_db() + self.dump_config_file() + logger.info("read v1 data ok") + time.sleep(0.2) + self.clear_db() + + def clear_db(self): + try: + self.db.remove(DEVICE_TABLE) + self.db.remove(GROUP_TABLE) + self.db.remove(REMOTE_TABLE) + self.db.remove(SERIAL_TABLE) + self.db.remove(GLOBAL_TABLE) + 
self.db.remove(PUB_TABLE) + self.db.remove(SUB_TABLE) + self.db.remove(WARN_TABLE) + self.db.remove(DEFAULT_TABLE) + shutil.rmtree(self.app_db_path) + except Exception as e: + try: + logger.error("clear db error : %s" % e) + finally: + e = None + del e + + def _open_db(self): + try: + if os.path.exists(self.db_dir): + if os.path.exists(self.app_db_path): + self.db.open(self.app_name) + self.found_db_dir = True + except Exception as e: + try: + logger.error("open db file error : %s" % e) + finally: + e = None + del e + + def _create_tables(self): + try: + if self.found_db_dir: + self.device_tb = self.db.get_table(DEVICE_TABLE, tblType=(InDB.TBL_TYP_KV)) + self.group_tb = self.db.get_table(GROUP_TABLE, tblType=(InDB.TBL_TYP_KV)) + self.remote_tb = self.db.get_table(REMOTE_TABLE, tblType=(InDB.TBL_TYP_KV)) + self.serial_tb = self.db.get_table(SERIAL_TABLE, tblType=(InDB.TBL_TYP_KV)) + self.global_tb = self.db.get_table(GLOBAL_TABLE, tblType=(InDB.TBL_TYP_KV)) + self.pub_tb = self.db.get_table(PUB_TABLE, tblType=(InDB.TBL_TYP_KV)) + self.sub_tb = self.db.get_table(SUB_TABLE, tblType=(InDB.TBL_TYP_KV)) + self.warn_tb = self.db.get_table(WARN_TABLE, tblType=(InDB.TBL_TYP_KV)) + self.default_tb = self.db.get_table(DEFAULT_TABLE, tblType=(InDB.TBL_TYP_KV)) + except Exception as e: + try: + logger.error("create db error : %s" % e) + finally: + e = None + del e + + def _read_all_db(self): + self.read_group_tb() + self.read_device_tb() + self.read_alarm_tb() + self.read_cloud_tb() + self.read_serail_tb() + self.read_global_tb() + self.read_pub_tb() + self.read_sub_tb() + self.read_default_tb() + if "quickfaas" not in self.db_cfg: + self.db_cfg["quickfaas"] = dict() + + def dump_config_file(self): + try: + print("***************** %s *********************" % self.filename) + print("***************** %s *********************" % self.db_cfg) + if not os.path.exists("/var/user/cfg/device_supervisor"): + os.mkdir("/var/user/cfg/device_supervisor") + with open((self.filename), 
"w+", encoding="utf-8") as f: + import rapidjson + rapidjson.dump((self.db_cfg), f, indent=1, ensure_ascii=False) + return True + except Exception as e: + try: + raise IOError("Write config file error %s" % e) + finally: + e = None + del e + + def set_azure_iot_device_name(self, connect_string): + try: + self.azure_iot_device_name = re.search(";DeviceId=(.*);", connect_string).group(1) + except Exception: + pass + + def get_snap7_var_addr(self, var): + reg_type = var["register_type"] + data_type = var["data_type"] + if re.match("BOOL", data_type, re.M | re.I) or re.match("BIT", data_type, re.M | re.I): + if re.match("db", reg_type, re.M | re.I): + var_info = "DB%s.%s.%s" % (var["dbnumber"], var["register_addr"], var["register_bit"]) + else: + var_info = "%s%s.%s" % (reg_type, var["register_addr"], var["register_bit"]) + else: + if re.match("db", reg_type, re.M | re.I): + var_info = "DB%s.%s" % (var["dbnumber"], var["register_addr"]) + else: + var_info = "%s%s" % (reg_type, var["register_addr"]) + return var_info + + def get_modbus_var_addrParse error at or near `COME_FROM' instruction at offset 242_1 + + def get_opcua_var_addr(self, var): + idt = "" + if "identifier_type" in var: + if var["identifier_type"].lower() == "string": + idt = "s" + else: + if var["identifier_type"].lower() == "number": + idt = "i" + else: + if var["identifier_type"] == "GUID": + idt = "g" + else: + if var["identifier_type"] == "OPAQUE": + idt = "b" + else: + logger.warn("Invalid identifier_type %s" % var["identifier_type"]) + return "" + if "namespace_index" in var: + if "identifier" in var: + ns = var["namespace_index"] + ide = var["identifier"] + if ns == 0 or ns == "0": + node_id = "{0}={1}".format(idt, ide) + else: + node_id = "ns={0};{1}={2}".format(ns, idt, ide) + return node_id + logger.warn("Invalid namespace_index/identifier : %s" % var) + return "" + + def get_var_send_type(self, mode): + sent_type = "" + if mode == "realtime": + sent_type = "periodic" + else: + if mode == 
"onchange": + sent_type = "onChange" + return sent_type + + def map_var_read_write(self, var): + if "read_write" in var: + read_write = var["read_write"] + if read_write == "read": + return "ro" + if read_write == "write": + return "wo" + return "rw" + else: + return "ro" + + def map_var_to_measure(self, protocol, var): + ignoer_opcua_and_enip = True + mea = dict() + if "var_name" not in var: + return + if protocol == "ISO-on-TCP": + mea["name"] = var["var_name"] + mea["ctrlName"] = var["device"] + mea["group"] = var["group"] + mode = self.get_var_send_type(var["mode"]) if "mode" in var else "periodic" + mea["uploadType"] = mode + mea["dataType"] = var["data_type"] if var["data_type"] != "BOOL" else "BIT" + mea["decimal"] = var["float_repr"] if "float_repr" in var else 2 + mea["len"] = var["size"] if "size" in var else 2 + mea["readWrite"] = self.map_var_read_write(var) + mea["unit"] = var["unit"] if "unit" in var else "" + mea["desc"] = var["desc"] if "desc" in var else "" + mea["addr"] = self.get_snap7_var_addr(var) + mea["transformType"] = var["data_operation"] if ("data_operation" in var and var["data_operation"]) else 0 + if mea["transformType"] == 1: + mea["maxValue"] = str(var["max_limit"]) if "max_limit" in var else "" + mea["minValue"] = str(var["min_limit"]) if "min_limit" in var else "" + mea["maxScaleValue"] = str(var["max_scale"]) if "max_scale" in var else "" + mea["minScaleValue"] = str(var["min_scale"]) if "min_scale" in var else "" + else: + if mea["transformType"] == 2: + mea["gain"] = str(var["scale"]) if "scale" in var else "1.0" + mea["offset"] = str(var["offset"]) if "offset" in var else "0.0" + else: + if mea["transformType"] == 3: + mea["startBit"] = str(var["startBit"]) if "startBit" in var else 0 + mea["endBit"] = str(var["endBit"]) if "endBit" in var else 1 + self.db_cfg["measures"].append(mea) + else: + if protocol in ('ModbusRTU', 'Modbus-RTU', 'ModbusTCP', 'Modbus-TCP'): + mea["name"] = var["var_name"] + mea["ctrlName"] = var["device"] 
+ mea["group"] = var["group"] + mode = self.get_var_send_type(var["mode"]) if "mode" in var else "periodic" + mea["uploadType"] = mode + mea["dataType"] = var["data_type"] if var["data_type"] != "BOOL" else "BIT" + mea["decimal"] = var["float_repr"] if "float_repr" in var else 2 + mea["len"] = var["size"] if "size" in var else 2 + mea["readWrite"] = self.map_var_read_write(var) + mea["unit"] = var["unit"] if "unit" in var else "" + mea["desc"] = var["desc"] if "desc" in var else "" + mea["addr"] = self.get_modbus_var_addr(var) + mea["transformType"] = var["data_operation"] if ("data_operation" in var and var["data_operation"]) else 0 + if mea["transformType"] == 1: + mea["maxValue"] = str(var["max_limit"]) if "max_limit" in var else "" + mea["minValue"] = str(var["min_limit"]) if "min_limit" in var else "" + mea["maxScaleValue"] = str(var["max_scale"]) if "max_scale" in var else "" + mea["minScaleValue"] = str(var["min_scale"]) if "min_scale" in var else "" + else: + if mea["transformType"] == 2: + mea["gain"] = str(var["scale"]) if "scale" in var else "1.0" + mea["offset"] = str(var["offset"]) if "offset" in var else "0.0" + else: + if mea["transformType"] == 3: + mea["startBit"] = str(var["startBit"]) if "startBit" in var else 0 + mea["endBit"] = str(var["endBit"]) if "endBit" in var else 1 + self.db_cfg["measures"].append(mea) + else: + if protocol == "EtherNet/IP": + if ignoer_opcua_and_enip: + return + mea["name"] = var["var_name"] + mea["ctrlName"] = var["device"] + mea["group"] = var["group"] + mode = self.get_var_send_type(var["mode"]) if "mode" in var else "periodic" + mea["uploadType"] = mode + mea["dataType"] = var["data_type"] if "data_type" in var else "" + mea["decimal"] = var["float_repr"] if "float_repr" in var else 2 + mea["len"] = var["size"] if "size" in var else 2 + mea["readWrite"] = self.map_var_read_write(var) + mea["unit"] = var["unit"] if "unit" in var else "" + mea["desc"] = var["desc"] if "desc" in var else "" + mea["addr"] = 
var["symbol"] + mea["transformType"] = var["data_operation"] if ("data_operation" in var and var["data_operation"]) else 0 + if mea["transformType"] == 1: + mea["maxValue"] = str(var["max_limit"]) if "max_limit" in var else "" + mea["minValue"] = str(var["min_limit"]) if "min_limit" in var else "" + mea["maxScaleValue"] = str(var["max_scale"]) if "max_scale" in var else "" + mea["minScaleValue"] = str(var["min_scale"]) if "min_scale" in var else "" + else: + if mea["transformType"] == 2: + mea["gain"] = str(var["scale"]) if "scale" in var else "1.0" + mea["offset"] = str(var["offset"]) if "offset" in var else "0.0" + else: + if mea["transformType"] == 3: + mea["startBit"] = str(var["startBit"]) if "startBit" in var else 0 + mea["endBit"] = str(var["endBit"]) if "endBit" in var else 1 + self.db_cfg["measures"].append(mea) + else: + if protocol == "OPC-UA": + if ignoer_opcua_and_enip: + return + mea["name"] = var["var_name"] + mea["ctrlName"] = var["device"] + mea["group"] = var["group"] + mode = self.get_var_send_type(var["mode"]) if "mode" in var else "periodic" + mea["uploadType"] = mode + mea["dataType"] = var["data_type"] if "data_type" in var else "" + mea["decimal"] = var["float_repr"] if "float_repr" in var else 2 + mea["len"] = var["size"] if "size" in var else 2 + mea["readWrite"] = self.map_var_read_write(var) + mea["unit"] = var["unit"] if "unit" in var else "" + mea["desc"] = var["desc"] if "desc" in var else "" + mea["addr"] = self.get_opcua_var_addr(var) + mea["transformType"] = var["data_operation"] if ("data_operation" in var and var["data_operation"]) else 0 + if mea["transformType"] == 1: + mea["maxValue"] = str(var["max_limit"]) if "max_limit" in var else "" + mea["minValue"] = str(var["min_limit"]) if "min_limit" in var else "" + mea["maxScaleValue"] = str(var["max_scale"]) if "max_scale" in var else "" + mea["minScaleValue"] = str(var["min_scale"]) if "min_scale" in var else "" + else: + if mea["transformType"] == 2: + mea["gain"] = 
str(var["scale"]) if "scale" in var else "1.0" + mea["offset"] = str(var["offset"]) if "offset" in var else "0.0" + else: + if mea["transformType"] == 3: + mea["startBit"] = str(var["startBit"]) if "startBit" in var else 0 + mea["endBit"] = str(var["endBit"]) if "endBit" in var else 1 + self.db_cfg["measures"].append(mea) + else: + if protocol == "Easy-Com": + mea["name"] = var["var_name"] + mea["ctrlName"] = var["device"] + mea["group"] = var["group"] + mode = self.get_var_send_type(var["mode"]) if "mode" in var else "periodic" + mea["uploadType"] = mode + mea["dataType"] = var["data_type"] if "data_type" in var else "" + mea["decimal"] = var["float_repr"] if "float_repr" in var else 2 + mea["len"] = var["size"] if "size" in var else 2 + mea["readWrite"] = self.map_var_read_write(var) + mea["unit"] = var["unit"] if "unit" in var else "" + mea["desc"] = var["desc"] if "desc" in var else "" + mea["addr"] = "" + mea["data_endian_reverse"] = var["data_endian_reverse"] if "data_endian_reverse" in var else 0 + mea["rd_offset"] = var["rd_offset"] if "rd_offset" in var else 0 + mea["rd_length"] = var["rd_length"] if "rd_length" in var else 0 + mea["data_parse_method"] = var["data_parse_method"] if "data_parse_method" in var else "hex2dec" + mea["bit_id"] = var["bit_id"] if "bit_id" in var else 0 + mea["transformType"] = var["data_operation"] if ("data_operation" in var and var["data_operation"]) else 0 + if mea["transformType"] == 1: + mea["maxValue"] = str(var["max_limit"]) if "max_limit" in var else "" + mea["minValue"] = str(var["min_limit"]) if "min_limit" in var else "" + mea["maxScaleValue"] = str(var["max_scale"]) if "max_scale" in var else "" + mea["minScaleValue"] = str(var["min_scale"]) if "min_scale" in var else "" + else: + if mea["transformType"] == 2: + mea["gain"] = str(var["scale"]) if "scale" in var else "1.0" + mea["offset"] = str(var["offset"]) if "offset" in var else "0.0" + else: + if mea["transformType"] == 3: + mea["startBit"] = str(var["startBit"]) 
if "startBit" in var else 0 + mea["endBit"] = str(var["endBit"]) if "endBit" in var else 1 + self.db_cfg["measures"].append(mea) + else: + return + + def read_device_tb(self): + self.db_cfg["controllers"] = list() + self.db_cfg["measures"] = list() + tb = self.device_tb + keys = tb.keys() + if keys: + if len(keys) > 0: + for key in keys: + data = tb.get(key) + if data is None: + continue + protocol = data["protocol"] + dev = dict() + name = data["device_name"] + self.global_device_dict[name] = key + if protocol == "ISO-on-TCP": + dev["name"] = name + ip = data["ip"] + port = data["port"] + dev["endpoint"] = "%s:%s" % (ip, port) + dev["protocol"] = protocol + dev["expired"] = int(data["timeout"]) if "timeout" in data else 10000 + dev["samplePeriod"] = 10 + dev["args"] = dict() + mode = data["mode"] + if mode.lower() == "rack/slot": + dev["args"]["mode"] = "Rack/Slot" + dev["args"]["slot"] = data["slot"] + dev["args"]["rack"] = data["rack"] + else: + dev["args"]["mode"] = "TSAP" + dev["args"]["localTsap"] = data["local_tsap"] + dev["args"]["remoteTsap"] = data["remote_tsap"] + self.db_cfg["controllers"].append(dev) + else: + if protocol == "ModbusRTU" or protocol == "ModbusTCP": + dev["name"] = name + if protocol == "ModbusRTU": + protocol = "Modbus-RTU" + dev["endpoint"] = "rs232" if data["serial"] == "RS232" else "rs485" + else: + protocol = "Modbus-TCP" + ip = data["ip"] + port = data["port"] + dev["endpoint"] = "%s:%s" % (ip, port) + dev["protocol"] = protocol + dev["expired"] = int(data["timeout"]) if "timeout" in data else 10000 + dev["samplePeriod"] = 10 + dev["args"] = dict() + dev["args"]["slaveAddr"] = data["slave"] + dev["args"]["continuousAcquisition"] = 1 + dev["args"]["maxContinuousNumber"] = 50 + if "byte_order" in data: + bo = data["byte_order"] + dev["args"]["int16Ord"] = "ab" if "ab" in bo else "ba" + dev["args"]["int32Ord"] = bo + dev["args"]["float32Ord"] = bo + else: + dev["args"]["int16Ord"] = data["byte_order_int16"] if "byte_order_int16" in 
data else "ab" + dev["args"]["int32Ord"] = data["byte_order_int32"] if "byte_order_int32" in data else "abcd" + float32 = data["byte_order_float32"] if "byte_order_float32" in data else "abcd" + dev["args"]["float32Ord"] = float32 + self.db_cfg["controllers"].append(dev) + else: + if protocol == "EtherNet/IP": + dev["name"] = name + ip = data["ip"] + port = data["port"] + dev["endpoint"] = "%s:%s" % (ip, port) + dev["protocol"] = protocol + dev["expired"] = int(data["timeout"]) if "timeout" in data else 10000 + dev["samplePeriod"] = 10 + dev["args"] = dict() + self.db_cfg["controllers"].append(dev) + else: + if protocol == "OPC-UA": + dev["name"] = name + dev["endpoint"] = data["endpoint"] + dev["protocol"] = protocol + dev["expired"] = int(data["timeout"]) if "timeout" in data else 10000 + dev["samplePeriod"] = 10 + dev["args"] = dict() + dev["args"]["auth"] = data["auth"] + dev["args"]["continuousAcquisition"] = 1 + dev["args"]["maxContinuousNumber"] = 50 + self.db_cfg["controllers"].append(dev) + var_tb = self.db.get_table(name, tblType=(InDB.TBL_TYP_KV)) + var_keys = var_tb.keys() + if var_keys and len(var_keys) > 0: + for vk in var_keys: + var = var_tb.get(vk) + if var is None: + continue + self.map_var_to_measure(protocol, var) + + def read_group_tb(self): + self.db_cfg["groups"] = list() + tb = self.group_tb + keys = tb.keys() + if keys: + if len(keys) > 0: + for key in keys: + data = tb.get(key) + if data is None: + continue + grp = dict() + name = data["group_name"] + grp["name"] = name + interval = 10 + if "upload_interval" in data: + interval = int(data["upload_interval"]) if data["upload_interval"] else 10 + grp["uploadInterval"] = interval + if data["group_type"] == "collect": + self.db_cfg["groups"].append(grp) + else: + self.alarm_groups[name] = list() + + def mappine_op_value(self, op): + """ + # v2: eq等于, neq不等于, gt大于, egt大于等于, lt小于, elt小于等于, 默认eq + # v1: Eq(等于), Neq(不等于), Gt(大于), Gne(大于等于), Lne(小于等于), Lt(小于), 默认 Eq + :param op: + :return: + """ + 
op_n = "eq" + if op == "Eq": + op_n = "eq" + else: + if op == "Neq": + op_n = "neq" + else: + if op == "Gt": + op_n = "gt" + else: + if op == "Gne": + op_n = "egt" + else: + if op == "Lne": + op_n = "elt" + else: + if op == "Lt": + op_n = "lt" + return op_n + + def map_warn_to_alarm(self, data): + alm = dict() + if data["quotes"] == 0: + alm["deleted"] = "measure" + data["var_name"] = data["warn_name"] + alm["name"] = data["warn_name"] + alm["ctrlName"] = data["device"] + alm["measureName"] = data["var_name"] + alm["alarmLevel"] = 1 + alm["content"] = data["alarm_content"] + alm["cond1"] = dict() + alm["cond2"] = dict() + alm["cond1"]["op"] = self.mappine_op_value(data["condition1"]) + alm["cond1"]["value"] = str(data["operand1"]) + alm["condOp"] = data["combine_method"].lower() + alm["cond2"]["op"] = self.mappine_op_value(data["condition2"]) + alm["cond2"]["value"] = str(data["operand2"]) + self.db_cfg["alarms"].append(alm) + group = data["group"] + name = alm["name"] + self.alarm_groups[group].append(name) + + def check_measure_name_exist(self, name): + for measure in self.db_cfg["measures"]: + if name == measure["name"]: + return True + + return False + + def generate_measure_name(self, name): + if self.check_measure_name_exist(name): + temp = name.split("_") + temp[-1] = "%s" % (int(temp[-1]) + 1) + name = "_".join(temp) + return self.generate_measure_name(name) + return name + + def new_measure_by_alarm(self, protocol, data): + var_name = None + mea = dict() + mea["ctrlName"] = data["device"] + mea["desc"] = "" + mea["gain"] = "1.0" + mea["group"] = "" + mea["offset"] = "0.0" + mea["readWrite"] = "ro" + mea["transformType"] = 0 + mea["unit"] = "" + mea["uploadType"] = "never" + mea["decimal"] = data["float_repr"] if "float_repr" in data else 2 + mea["len"] = data["size"] if "size" in data else 2 + if protocol == "ISO-on-TCP": + mea["dataType"] = data["data_type"] if data["data_type"] != "BOOL" else "BIT" + mea["addr"] = self.get_snap7_var_addr(data) + var_name 
= self.generate_measure_name("%s_1" % data["warn_name"]) + mea["name"] = var_name + self.db_cfg["measures"].append(mea) + else: + if protocol in ('ModbusRTU', 'Modbus-RTU', 'ModbusTCP', 'Modbus-TCP'): + mea["dataType"] = data["data_type"] if data["data_type"] != "BOOL" else "BIT" + mea["addr"] = self.get_modbus_var_addr(data) + var_name = self.generate_measure_name("%s_1" % data["warn_name"]) + mea["name"] = var_name + self.db_cfg["measures"].append(mea) + return var_name + + def read_alarm_tb(self): + self.db_cfg["alarms"] = list() + tb = self.warn_tb + keys = tb.keys() + if keys: + if len(keys) > 0: + for key in keys: + data = tb.get(key) + if data is None: + continue + if data["quotes"] == 1: + self.map_warn_to_alarm(data) + else: + device_id = self.global_device_dict[data["device"]] + protocol = self.device_tb.get(device_id)["protocol"] + var_name = self.new_measure_by_alarm(protocol, data) + if var_name: + data["quotes"] = 1 + data["var_name"] = var_name + self.map_warn_to_alarm(data) + + def read_file_content(self, file_path): + if not os.path.exists(file_path): + logger.warn("Could not find %s" % file_path) + return "" + with open(file_path, "r", encoding="utf-8") as f: + content = f.read() + return content + + def read_cloud_tb(self): + self.db_cfg["clouds"] = list() + tb = self.remote_tb + keys = tb.keys() + if keys: + if len(keys) > 0: + for key in keys: + data = tb.get(key) + if data is None: + continue + cld = dict() + cloud_type = data["type"] if "type" in data else "Standard MQTT" + cld["enable"] = data["enable"] if "enable" in data else 0 + cld["name"] = "default" + cld["cacheSize"] = 1000 + cld["args"] = dict() + if cloud_type == "GeneralMqtt": + cloud_type = "Standard MQTT" + cld["args"]["host"] = data["host"] + cld["args"]["port"] = data["port"] + cld["args"]["clientId"] = data["client_id"] + cld["args"]["auth"] = data["authority"] + if data["authority"]: + cld["args"]["username"] = data["username"] + cld["args"]["passwd"] = data["passwd"] + 
cld["args"]["cleanSession"] = data["clean_session"] + cld["args"]["tls"] = data["tls"] + if int(data["tls"]) == 1: + cld["args"]["rootCA_content"] = self.read_file_content(data["capath"]) + if "capath" in data["capath"]: + if os.path.exists(data["capath"]): + path, _ = os.path.split(data["capath"]) + shutil.rmtree(path) + cld["args"]["rootCA"] = "/var/user/cfg/device_supervisor/mqtt/rootCA.pem" + cld["args"]["verifyServer"] = data["verify_server"] + cld["args"]["verifyClient"] = data["verify_client"] + if data["verify_client"]: + cld["args"]["key_content"] = self.read_file_content(data["client_keypath"]) + if "client_keypath" in data["client_keypath"]: + if os.path.exists(data["client_keypath"]): + path, _ = os.path.split(data["client_keypath"]) + shutil.rmtree(path) + cld["args"]["key"] = "/var/user/cfg/device_supervisor/mqtt/client.key" + cld["args"]["cert_content"] = self.read_file_content(data["client_certpath"]) + if "client_certpath" in data["client_certpath"]: + if os.path.exists(data["client_certpath"]): + path, _ = os.path.split(data["client_certpath"]) + shutil.rmtree(path) + cld["args"]["cert"] = "/var/user/cfg/device_supervisor/mqtt/client.crt" + cld["args"]["keepalive"] = data["keep_alive"] + cld["args"]["mqttVersion"] = "v3.1.1" if data["protocol"] == "MQTTv311" else "v3.1" + elif cloud_type == "AliyunLink": + cloud_type = "Aliyun IoT" + self.cloud_type = cloud_type + cld["args"]["host"] = data["host_name"] + cld["args"]["port"] = data["ali_port"] + cld["args"]["connType"] = data["conn_type"] + cld["args"]["productKey"] = data["product_key"] + cld["args"]["deviceName"] = data["device_name"] + cld["args"]["deviceSecret"] = data["device_secret"] + self.aliyun_product_key = data["product_key"] + self.aliyun_device_name = data["device_name"] + cld["args"]["productSecret"] = data["product_secret"] + cld["args"]["thingTSL_content"] = self.read_file_content(data["thing_tsl"]) + if "thing_tsl" in data["thing_tsl"]: + if os.path.exists(data["thing_tsl"]): + 
path, _ = os.path.split(data["thing_tsl"]) + up_path = os.path.dirname(path) + shutil.rmtree(up_path) + cld["args"]["thingTSL"] = "/var/user/cfg/device_supervisor/thing/tls.json" + cld["args"]["tls"] = data["ali_tls"] + cld["args"]["cleanSession"] = data["ali_clean_session"] + cld["args"]["keepalive"] = data["ali_keep_alive"] + cld["args"]["mqttVersion"] = "v3.1.1" if data["protocol"] == "MQTTv311" else "v3.1" + else: + pass + if cloud_type == "AWSIoT": + cloud_type = "AWS IoT" + if "aws_type" in data: + if data["aws_type"] == "GreenGrass Core": + break + cld["args"]["endPoint"] = data["aws_host"] + cld["args"]["port"] = data["aws_port"] + cld["args"]["clientId"] = data["aws_client_id"] + cld["args"]["cleanSession"] = data["aws_clean_session"] + cld["args"]["keepalive"] = data["aws_keep_alive"] + cld["args"]["mqttVersion"] = "v3.1.1" if data["protocol"] == "MQTTv311" else "v3.1" + cld["args"]["rootCA_content"] = self.read_file_content(data["aws_capath"]) + cld["args"]["key_content"] = self.read_file_content(data["pkpath"]) + cld["args"]["cert_content"] = self.read_file_content(data["certpath"]) + if "aws_capath" in data["aws_capath"]: + if os.path.exists(data["aws_capath"]): + path, _ = os.path.split(data["aws_capath"]) + shutil.rmtree(path) + cld["args"]["rootCA"] = "/var/user/cfg/device_supervisor/aws/rootCA.pem" + if "pkpath" in data["pkpath"]: + if os.path.exists(data["pkpath"]): + path, _ = os.path.split(data["pkpath"]) + shutil.rmtree(path) + cld["args"]["key"] = "/var/user/cfg/device_supervisor/aws/private.key" + if "certpath" in data["certpath"]: + if os.path.exists(data["certpath"]): + path, _ = os.path.split(data["certpath"]) + shutil.rmtree(path) + cld["args"]["cert"] = "/var/user/cfg/device_supervisor/aws/certificate.crt" + else: + if cloud_type == "AzureIoT": + cloud_type = "Azure IoT" + self.cloud_type = cloud_type + self.set_azure_iot_device_name(data["connection_string"]) + cld["args"]["connectionString"] = data["connection_string"] + 
cld["args"]["authType"] = "Symmetric key" + else: + continue + cld["type"] = cloud_type + self.db_cfg["clouds"].append(cld) + + if not self.db_cfg["clouds"]: + self.db_cfg["clouds"].append({'cacheSize':100, 'enable':0, 'name':"default", 'type':"Standard MQTT", 'args':{ + 'host': '""', 'port': 1883, 'clientId': '""', 'auth': 0, + 'tls': 0, + 'cleanSession': 0, 'mqttVersion': '"v3.1.1"', 'keepalive': 60, + 'key': '""', + 'cert': '""', 'rootCA': '""', 'verifyServer': 0, 'verifyClient': 0, + 'username': '""', 'passwd': '""'}}) + + def read_serail_tb(self): + if "misc" not in self.db_cfg: + self.db_cfg["misc"] = dict() + else: + self.db_cfg["misc"]["coms"] = list() + tb = self.serial_tb + keys = tb.keys() + if keys: + if len(keys) > 0: + for key in keys: + com = dict() + if "RS485" in key: + com["name"] = "rs485" + else: + if "RS232" in key: + com["name"] = "rs232" + else: + continue + data = tb.get(key) + if data is None: + continue + com["baud"] = data["baudrate"] + com["bits"] = data["bytesize"] + com["parityChk"] = data["parity"].lower() + com["stopbits"] = data["stopbits"] + self.db_cfg["misc"]["coms"].append(com) + + self.db_cfg["misc"]["coms"] = self.db_cfg["misc"]["coms"] or [ + { + 'name': '"rs232"', 'baud': 9600, 'bits': 8, 'stopbits': 1, + 'parityChk': '"n"'}, + { + 'name': '"rs485"', 'baud': 9600, 'bits': 8, 'stopbits': 1, + 'parityChk': '"n"'}] + + def read_global_tb(self): + self.db_cfg["labels"] = list() + tb = self.global_tb + keys = tb.keys() + label = dict() + label["key"] = "gateway_sn" + label["value"] = self.serial_number + self.db_cfg["labels"].append(label) + if keys: + if len(keys) > 0: + for key in keys: + data = tb.get(key) + label = dict() + label["key"] = data["key"] + label["value"] = data["value"] + self.db_cfg["labels"].append(label) + + def read_pub_tb(self): + if "quickfaas" not in self.db_cfg: + self.db_cfg["quickfaas"] = dict() + self.db_cfg["quickfaas"]["uploadFuncs"] = list() + tb = self.pub_tb + keys = tb.keys() + if keys: + if 
def read_pub_tb(self):
    """Translate v1 publish rows (matching the active cloud type) into
    quickfaas uploadFuncs entries; Aliyun/Azure topics are rewritten to the
    vendor-specific scheme."""
    if "quickfaas" not in self.db_cfg:
        self.db_cfg["quickfaas"] = dict()
    self.db_cfg["quickfaas"]["uploadFuncs"] = list()
    tb = self.pub_tb
    keys = tb.keys()
    if not keys or len(keys) == 0:
        return
    for key in keys:
        data = tb.get(key)
        if data["cloud_type"] != CLOUD_IDENTIFIER_MAPS[self.db_cfg["clouds"][0]["type"]]:
            continue
        pub = dict()
        name = data["name"]
        group_type = data["group_type"] if "group_type" in data else "collect"
        if group_type == "collect":
            pub["groups"] = data["group"]
            pub["trigger"] = "measure_event"
        else:
            # Alarm publication: expand group names into their alarm names.
            alarm_names = list()
            for gname in data["group"]:
                if gname in self.alarm_groups:
                    alarm_names.extend(self.alarm_groups[gname])
            pub["alarms"] = list(set(alarm_names))
            pub["trigger"] = "warning_event"
        pub["name"] = name
        pub["cloudName"] = "default"
        pub["msgType"] = data["type"] if ("type" in data and data["type"] != 3) else 0
        # NOTE(review): decompiled nested else/if chains flattened to elif;
        # an Aliyun msgType outside {0,1,2} leaves "topic" unset, as in the
        # original.
        if self.cloud_type == "Aliyun IoT":
            if pub["msgType"] == 1:
                pub["topic"] = "/sys/%s/%s/thing/event/property/post" % (
                    self.aliyun_product_key, self.aliyun_device_name)
            elif pub["msgType"] == 2:
                pub["topic"] = "/sys/%s/%s/thing/event/{tsl.event.identifer}/post" % (
                    self.aliyun_product_key, self.aliyun_device_name)
            elif pub["msgType"] == 0:
                pub["topic"] = "/%s/%s/%s" % (self.aliyun_product_key,
                                              self.aliyun_device_name,
                                              data["topic"])
        elif self.cloud_type == "Azure IoT":
            pub["topic"] = "devices/%s/messages/events/" % self.azure_iot_device_name
        else:
            pub["topic"] = data["topic"] if "topic" in data else ""
        pub["qos"] = data["qos"] if "qos" in data else 0
        pub["funcName"] = data["function_name"]
        pub["script"] = data["script"]
        self.db_cfg["quickfaas"]["uploadFuncs"].append(pub)

def read_sub_tb(self):
    """Translate v1 subscribe rows (matching the active cloud type) into
    quickfaas downloadFuncs entries with vendor-specific topics."""
    if "quickfaas" not in self.db_cfg:
        self.db_cfg["quickfaas"] = dict()
    self.db_cfg["quickfaas"]["downloadFuncs"] = list()
    tb = self.sub_tb
    keys = tb.keys()
    if not keys or len(keys) == 0:
        return
    for key in keys:
        data = tb.get(key)
        if data["cloud_type"] != CLOUD_IDENTIFIER_MAPS[self.db_cfg["clouds"][0]["type"]]:
            continue
        sub = dict()
        name = data["name"]
        sub["name"] = name
        sub["cloudName"] = "default"
        sub["msgType"] = data["type"] if ("type" in data and data["type"] != 3) else 0
        if sub["msgType"] == 5:
            sub["msgType"] = 3
        sub["trigger"] = "command_event"
        sub["payload_type"] = "Plaintext"
        if self.cloud_type == "Aliyun IoT":
            if sub["msgType"] == 2:
                sub["topic"] = "/sys/%s/%s/thing/service/property/set" % (
                    self.aliyun_product_key, self.aliyun_device_name)
            elif sub["msgType"] == 1:
                sub["topic"] = "/sys/%s/%s/thing/service/+" % (
                    self.aliyun_product_key, self.aliyun_device_name)
            elif sub["msgType"] == 3 and "topic" in sub and "ext/" not in sub["topic"]:
                # NOTE(review): tests sub (which has no "topic" yet), not
                # data — kept exactly as decompiled; likely a v1 bug.
                sub["topic"] = "/sys/%s/%s/rrpc/request/+" % (
                    self.aliyun_product_key, self.aliyun_device_name)
            elif sub["msgType"] == 0:
                sub["topic"] = "/%s/%s/%s" % (self.aliyun_product_key,
                                              self.aliyun_device_name,
                                              data["topic"])
            else:
                sub["topic"] = data["topic"]
        elif self.cloud_type == "Azure IoT":
            sub["topic"] = "devices/%s/messages/devicebound/#" % self.azure_iot_device_name
        else:
            sub["topic"] = data["topic"] if "topic" in data else ""
        sub["qos"] = data["qos"] if "qos" in data else 0
        sub["funcName"] = data["function_name"]
        sub["script"] = data["script"]
        self.db_cfg["quickfaas"]["downloadFuncs"].append(sub)

def read_default_tb(self):
    """Apply v1 default-table overrides (alarm record size, log level, cloud
    cache size) on top of stock misc defaults."""
    if "misc" not in self.db_cfg:
        self.db_cfg["misc"] = dict()
    # NOTE(review): defaults applied unconditionally (decompiled nesting was
    # ambiguous) so logLvl/maxAlarmRecordSz always exist — confirm against
    # original bytecode.
    self.db_cfg["misc"]["logLvl"] = "INFO"
    self.db_cfg["misc"]["maxAlarmRecordSz"] = 2000
    tb = self.default_tb
    keys = tb.keys()
    if not keys or len(keys) == 0:
        return
    for key in keys:
        data = tb.get(key)
        if key == "warning_recording":
            self.db_cfg["misc"]["maxAlarmRecordSz"] = data[key]
        elif key == "log_level":
            self.db_cfg["misc"]["logLvl"] = data[key]
        elif key == "catch_recording":
            for cld in self.db_cfg["clouds"]:
                cld["cacheSize"] = data[key]
b/APPS_UNCOMPILED/src/master/Config.py @@ -0,0 +1,1182 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/master/Config.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 71137 bytes +import os, re, copy, json, shutil +from common.CloudType import StandardMQTT, AzureIoT, AliyunIoT, InspurYunzhou, HeilanV1_0, HeilanV2_0 +from common.Logger import logger +from common.Protocol import MC1E, MC3C, MC3COT, MC3E, MCPS, OMFT, OMFU, OMHLS, PANMEW, SIPPI, TATEKPROGRAM, Eip, EipPCCC, MbRtu, MbTcp, MbAscii, MbRtuOverTcp, OpcUa, OpcDa, Snap7, TATEKPROGRAM_OverTcp, Easycom, EasyEthernet, BFADS, IEC101, IEC104, OpcUa_PubSub +dsa_supervisord_config = '\n[program:device_supervisor]\nuser=root\npriority=100\ncommand=python /var/user/app/device_supervisor/device_supervisor\nautorestart=true\nstartsecs=10\nstopsignal=TERM\nstopasgroup=true\nkillasgroup=true\nredirect_stderr=true\nstdout_logfile=/var/user/log/device_supervisor.log\nstdout_logfile_maxbytes={logSize}MB\nstdout_logfile_backups={logNum}\nstdout_events_enabled=true\nenvironment=\n PYTHON_EGG_CACHE="/tmp/.device_supervisor",\n LD_LIBRARY_PATH="/var/user/app/device_supervisor/lib",\n PYTHONPATH="/var/user/app/device_supervisor/lib"\n' + +class UConfig(object): + + def __init__(self): + super().__init__() + self.controllers = list() + self.measures = list() + self.alarmLables = list() + self.alarms = list() + self.groups = list() + self.misc = dict() + self.clouds = list() + self.quickfaas = dict() + self.mindspheres = list() + self.labels = list() + self.modbusSlave = dict() + self.modbusRTUSlave = dict() + self.iec104Server = dict() + self.iec101Server = dict() + self.iec104Client = dict() + self.opcuaServer = dict() + self.iec61850Server = dict() + self.sl651Slave = dict() + self.hj212Client = dict() + self.bacnetServer 
= dict() + self.Dnp3Server = dict() + self.snmpAgent = dict() + self.southMetadata = dict() + self.bindMetadata = dict() + self.bindConfig = dict() + self.templates = dict() + self.version = str() + + +def find_measure_and_return_readWrite(iec104_measure, measure): + for i in range(0, len(measure)): + if measure[i]["name"] == iec104_measure["measureName"] and measure[i]["ctrlName"] == iec104_measure["ctrlName"]: + return measure[i]["readWrite"] + + +def init_env(app_path, model_name): + if "IG974" in model_name: + install_path = app_path + "/install_uclibc/usr" + else: + if model_name[None[:2]].startswith("EC") or model_name[None[:3]].startswith("HEC"): + install_path = app_path + "/install_ieos/usr" + else: + install_path = app_path + "/install_gnu/usr" + if os.path.exists(install_path): + logger.info("Init ENV(%s)" % model_name) + os.system("chmod +x %s/bin/*;cp -rf %s/bin/* %s" % (install_path, install_path, app_path)) + os.system("chmod +x %s/sbin/* %s/sbin/publish/Scanner;cp -rf %s/sbin/* %s" % ( + install_path, install_path, install_path, app_path)) + os.system("cp -rf %s/cfg/* %s" % (install_path, app_path)) + os.system("cp -rf %s/lib/* %s" % (install_path, app_path + "/lib")) + os.system("rm -rf %s/install_*" % app_path) + + +class Config(object): + + def __init__(self, mobiuspi, master): + self.filename = mobiuspi.app_cfg_path + self.app_path = mobiuspi.app_path + self.app_base_path = mobiuspi.app_base_path + self.app_run_base_path = mobiuspi.app_run_base_path + self.mobiuspi = mobiuspi + self.master = master + init_env(self.app_path, mobiuspi.model_name) + try: + if os.path.exists(self.app_base_path + "/data/dbhome/device_supervisor/group_tb.tbl"): + import master.AdaptConfig as AdaptConfig + self.v1_conf = AdaptConfig("device_supervisor", mobiuspi) + self.filename = self.app_base_path + "/cfg/device_supervisor/device_supervisor.cfg" + del self.v1_conf + except Exception as e: + try: + logger.error("Parse config(v1) error %s" % e) + finally: + e = None + 
del e + + self.model_name = mobiuspi.model_name + self.product_number = mobiuspi.product_number + self.ucfg = UConfig() + + def find_ctl_name(self, name): + for ctl in self.ucfg.controllers: + if ctl["name"] == name: + return ctl + + return False + + def find_mea_by_name(self, ctrlName, measureName): + for mea in self.ucfg.measures: + if mea["ctrlName"] == ctrlName and mea["name"] == measureName: + return mea + + return False + + def _get_app_version(self): + try: + with open((self.app_base_path + "/app/device_supervisor/device_supervisor.info"), "r", encoding="utf-8") as fp: + config = json.load(fp) + version = str(config["version"]) + except Exception as error: + try: + logger.warn("Unable get app version(%s)" % error) + version = str() + finally: + error = None + del error + + return version + + def _update_topic_by_index(self, topic, index, value): + topic_split = topic.split("/") + topic_split[int(index)] = str(value) + return "/".join(topic_split) + + def _find_ucfg_cloud(self, cloud_name): + for cloud in self.ucfg.clouds: + if cloud["name"] == cloud_name: + return cloud + + def _find_ucfg_mindsphere(self, mindsphere_name): + for mindsphere in self.ucfg.mindspheres: + if mindsphere["name"] == mindsphere_name: + return mindsphere + + def load_config_file(self, config=None): + if config is None: + logger.info("Load config file: %s" % self.filename) + try: + with open((self.filename), "r", encoding="utf-8") as f: + cfg = json.load(f) + except Exception: + logger.info("Load default config") + with open((self.app_path + "/config.ini"), "r", encoding="utf-8") as f: + cfg = json.load(f) + + else: + cfg = config + if "version" not in cfg or cfg["version"] != self._get_app_version(): + self.version_change_adapt_config(cfg) + else: + self.ucfg.__dict__ = cfg + self.misc_adapt_config(cfg) + self.check_external_storage() + if self.mobiuspi.model_name in ('EC942', 'EC954'): + self.set_coms_mode(self.ucfg.misc["coms"], self.mobiuspi.model_name) + 
logger.set_level(level=(self.ucfg.misc["logLvl"])) + if self.mobiuspi.model_name[None[:2]].startswith("EC") or self.mobiuspi.model_name[None[:3]].startswith("HEC"): + self.master.ec_update_log_timer.add(3) + + def is_storage_exist(self, path): + try: + import subprocess + result = subprocess.run("df", capture_output=True, text=True) + if result.returncode == 0: + for line in result.stdout.split("\n")[1[:None]]: + if "/dev/" in line.split(" ")[0] and line.split(" ")[-1] in path: + return True + + else: + logger.error("Run command df failed! error: %s" % result.stderr) + return False + except Exception as error: + try: + logger.error("command df failed! error: %s" % error) + return False + finally: + error = None + del error + + return False + + def check_external_storage(self): + for group in self.ucfg.groups: + if group["historyDataMode"] == "gateway" or self.is_storage_exist(group["historyDataPath"]): + os.system("mkdir -p %s" % group["historyDataPath"]) + + if self.ucfg.misc["cacheMode"] == "gateway" or self.is_storage_exist(self.ucfg.misc["cachePath"]): + os.system("mkdir -p %s" % self.ucfg.misc["cachePath"]) + if self.ucfg.misc["debugLogMode"] == "gateway" or self.is_storage_exist(self.ucfg.misc["debugLogPath"]): + os.system("mkdir -p %s" % self.ucfg.misc["debugLogPath"]) + + def set_coms_mode(self, coms, model_name): + for com in coms: + if com["name"] == "rs232": + if model_name == "EC954": + cmd = "echo '%s' > /sys/kernel/debug/p1_mode" % com["mode"][2[:None]] + else: + cmd = "echo '%s' > /sys/kernel/debug/com1_mode" % com["mode"][2[:None]] + else: + if com["name"] == "rs485": + if model_name == "EC954": + cmd = "echo '%s' > /sys/kernel/debug/p2_mode" % com["mode"][2[:None]] + else: + cmd = "echo '%s' > /sys/kernel/debug/com2_mode" % com["mode"][2[:None]] + else: + if model_name == "EC954": + if int(com["name"][3[:None]]) > 4: + continue + cmd = "echo '%s' > /sys/kernel/debug/p%s_mode" % (com["mode"][2[:None]], com["name"][3[:None]]) + else: + cmd = "echo 
'%s' > /sys/kernel/debug/com%s_mode" % (com["mode"][2[:None]], com["name"][3[:None]]) + os.system(cmd) + + def _generate_md5(self, data): + import hashlib + if not isinstance(data, str): + data = str(data) + md5_obj = hashlib.md5() + byte_data = data.encode(encoding="utf-8") + md5_obj.update(byte_data) + str_md5 = md5_obj.hexdigest() + return str_md5 + + def ec_update_log_timer_handler(self, evt=None, userdata=None): + new_config = dsa_supervisord_config.format(logSize=(self.ucfg.misc["logSize"]), logNum=(self.ucfg.misc["logNum"] - 1)) + with open((self.app_base_path + "/supervisor_subconf/device_supervisor.conf"), "a+", encoding="utf-8") as fp: + fp.seek(0) + old_config = fp.read() + if self._generate_md5(new_config) != self._generate_md5(old_config): + fp.truncate(0) + fp.write(new_config) + message = {"name": "device_supervisor"} + self.master.mqclient.publish("ds2/eventbus/system/restart/service", json.dumps(message)) + + def _find_labels_by_key(self, labels, key): + for label in labels: + if label["key"] == key: + return label + + def misc_adapt_config(self, cfg): + labels = copy.deepcopy(cfg["labels"]) + self.ucfg.labels = list() + label = self._find_labels_by_key(labels, "SN") + if label: + label["value"] = str(self.mobiuspi.serial_number) + else: + label = {'key':"SN", + 'value':str(self.mobiuspi.serial_number)} + self.ucfg.labels.append(label) + label = self._find_labels_by_key(labels, "MAC") + if label: + label["value"] = str(self.mobiuspi.product_mac) + else: + label = {'key':"MAC", + 'value':str(self.mobiuspi.product_mac)} + self.ucfg.labels.append(label) + for label in labels: + if label["key"] not in ('SN', 'MAC'): + self.ucfg.labels.append(label) + + if "IG532" in self.mobiuspi.model_name: + coms = [{'name': '"rs232"', 'baud': 9600, 'bits': 8, 'stopbits': 1, 'parityChk': '"n"'}, + { + 'name': '"rs485"', 'baud': 9600, 'bits': 8, 'stopbits': 1, + 'parityChk': '"n"'}, + { + 'name': '"com3"', 'baud': 9600, 'bits': 8, 'stopbits': 1, + 'parityChk': '"n"'}, 
+ { + 'name': '"com4"', 'baud': 9600, 'bits': 8, 'stopbits': 1, + 'parityChk': '"n"'}] + if "-LRAS" in self.mobiuspi.product_number: + coms.append({'name': '"lora"', 'baud': 115200, 'bits': 8, 'stopbits': 1, 'parityChk': '"n"'}) + elif self.mobiuspi.model_name[None[:2]].startswith("EC") or self.mobiuspi.model_name[None[:3]].startswith("HEC"): + from common.InternalPath import EC_SYSTEM_INFO + try: + with open(EC_SYSTEM_INFO, "r", encoding="utf-8") as fp: + uarts = json.load(fp)["device_info"]["hardware_info"]["uart"] + except Exception: + uarts = dict() + + coms = list() + for uart in uarts: + if int("".join(filter(str.isdigit, uart["uart_name"]))) == 1: + com = { + 'name': '"rs232"', 'baud': 9600, 'bits': 8, + 'stopbits': 1, 'parityChk': '"n"'} + else: + if int("".join(filter(str.isdigit, uart["uart_name"]))) == 2: + com = { + 'name': '"rs485"', 'baud': 9600, + 'bits': 8, 'stopbits': 1, 'parityChk': '"n"'} + else: + com = {'name':"com%s" % ("".join(filter(str.isdigit, uart["uart_name"]))), + 'baud':9600, + 'bits':8, 'stopbits':1, 'parityChk':"n"} + if self.mobiuspi.model_name in ('EC942', 'EC954'): + com["mode"] = "rs485" + coms.append(com) + + else: + coms = [ + { + 'name': '"rs232"', 'baud': 9600, 'bits': 8, 'stopbits': 1, + 'parityChk': '"n"'}, + { + 'name': '"rs485"', 'baud': 9600, 'bits': 8, 'stopbits': 1, + 'parityChk': '"n"'}] + if len(self.ucfg.misc["coms"]) != len(coms): + self.ucfg.misc["coms"] = coms + self.filename = self.app_base_path + "/cfg/device_supervisor/device_supervisor.cfg" + self.dump_config_file() + + def version_change_adapt_ctrl_configParse error at or near `LOAD_FAST' instruction at offset 1460 + + def version_change_adapt_meas_config(self, measureCfg): + for i in range(0, len(measureCfg)): + if "pollCycle" not in measureCfg[i]: + measureCfg[i]["pollCycle"] = 0 + elif measureCfg[i]["dataType"] == "STRING": + if "codeType" not in measureCfg[i]: + measureCfg[i]["codeType"] = "UTF-8" + else: + if "alias" in measureCfg[i]: + 
measureCfg[i]["name"] = measureCfg[i]["alias"] + measureCfg[i].pop("alias") + elif "ctrlAlias" in measureCfg[i]: + measureCfg[i]["ctrlName"] = measureCfg[i]["ctrlAlias"] + measureCfg[i].pop("ctrlAlias") + if measureCfg[i]["dataType"] == "BIT": + if "bitMap" not in measureCfg[i]: + measureCfg[i]["bitMap"] = 0 + if measureCfg[i]["dataType"] == "BIT" and "reverseBit" not in measureCfg[i]: + measureCfg[i]["reverseBit"] = 0 + if measureCfg[i]["dataType"] in ('BYTE', 'SINT', 'WORD', 'INT', + 'DWORD', 'DINT', 'BCD', 'BCD32', + 'ULONG', 'LONG') and "enableBit" not in measureCfg[i]: + measureCfg[i]["enableBit"] = 0 + if measureCfg[i]["transformType"] in (1, 2, 4): + if "transDecimal" not in measureCfg[i]: + measureCfg[i]["transDecimal"] = 6 + elif measureCfg[i]["uploadType"] == "onChange": + if "enableBit" in measureCfg[i]: + if measureCfg[i]["enableBit"] == 0 and "deadZonePercent" not in measureCfg[i]: + measureCfg[i]["deadZonePercent"] = "0" + elif measureCfg[i]["dataType"] in ('FLOAT', 'DOUBLE'): + if "deadZonePercent" not in measureCfg[i]: + measureCfg[i]["deadZonePercent"] = "0" + ctr = self.find_ctl_name(measureCfg[i]["ctrlName"]) + if not ctr: + continue + if "enablePerOnchange" in ctr and ctr["enablePerOnchange"] == 1: + for j in range(0, len(self.ucfg.groups)): + if self.ucfg.groups[j]["name"] == measureCfg[i]["group"]: + if "enablePerOnchange" in self.ucfg.groups[j]: + if ctr["onchangePeriod"] < self.ucfg.groups[j]["onchangePeriod"]: + break + self.ucfg.groups[j]["enablePerOnchange"] = 1 + self.ucfg.groups[j]["onchangePeriod"] = ctr["onchangePeriod"] + break + + ctr = self.find_ctl_name(measureCfg[i]["ctrlName"]) + if not ctr: + continue + if ctr["protocol"] in ('Mitsubishi CPU Port(Serial)', 'Mitsubishi MC 3C', + 'Mitsubishi MC 3E'): + if "T" == re.search("\\D+", measureCfg[i]["addr"]).group(0): + measureCfg[i]["addr"] = measureCfg[i]["addr"].replace("T", "TN") + else: + if "C(16bit)" in measureCfg[i]["addr"]: + measureCfg[i]["addr"] = 
measureCfg[i]["addr"].replace("C(16bit)", "CN") + else: + if "C(32bit)" in measureCfg[i]["addr"]: + measureCfg[i]["addr"] = measureCfg[i]["addr"].replace("C(32bit)", "CN") + if measureCfg[i]["dataType"] == "STRING" and "len" not in measureCfg[i]: + measureCfg[i]["len"] = 2 + else: + if ctr["protocol"] in [MbRtu, MbTcp, MbAscii, MbRtuOverTcp, OpcUa, OpcDa, + Snap7, OpcUa_PubSub]: + if "msecSample" not in measureCfg[i]: + measureCfg[i]["msecSample"] = 0 + if ctr["protocol"] in [OpcUa, OpcUa_PubSub]: + if "arrayIndex" not in measureCfg[i]: + measureCfg[i]["arrayIndex"] = -1 + if ctr["protocol"] in [MbRtu, MbTcp, MbAscii, MbRtuOverTcp]: + if measureCfg[i]["dataType"] == "BIT" and not measureCfg[i]["addr"].startswith("3"): + if measureCfg[i]["addr"].startswith("4"): + measureCfg[i]["dataType"] = "WORD" + measureCfg[i]["enableBit"] = 1 + measureCfg[i]["bitIndex"] = int(measureCfg[i]["addr"].split(".")[1]) + if measureCfg[i]["uploadType"] != "never" and "storageLwTSDB" not in measureCfg[i]: + measureCfg[i]["storageLwTSDB"] = 0 + + def version_change_adapt_config(self, cfg): + logger.info("upgrade %s config to %s" % (cfg["version"] if "version" in cfg else "", self._get_app_version())) + if "controllers" in cfg: + if cfg["controllers"]: + self.ucfg.controllers = cfg["controllers"] + else: + self.ucfg.controllers = list() + for i in range(0, len(self.ucfg.controllers)): + self.version_change_adapt_ctrl_config(self.ucfg.controllers[i]) + + if "groups" in cfg and cfg["groups"]: + self.ucfg.groups = cfg["groups"] + haveDefault = 0 + for j in range(0, len(self.ucfg.groups)): + if self.ucfg.groups[j]["name"] == "default": + haveDefault = 1 + self.ucfg.groups[j]["_id"] = "group59b64649c93" + + if haveDefault == 0: + self.ucfg.groups.append({'name': '"default"', 'uploadInterval': 10, 'enablePerOnchange': 0, + 'LwTSDBSize': 1000, 'strategy': 1, 'historyDataMode': '"gateway"', + '_id': '"group59b64649c93"', + 'historyDataPath': '"/var/user/data/dbhome/device_supervisor/LwTSDB"'}) + 
else: + self.ucfg.groups = list() + self.ucfg.groups.append({'name': '"default"', 'uploadInterval': 10, 'enablePerOnchange': 0, 'LwTSDBSize': 1000, + 'strategy': 1, 'historyDataMode': '"gateway"', '_id': '"group59b64649c93"', + 'historyDataPath': '"/var/user/data/dbhome/device_supervisor/LwTSDB"'}) + if "measures" in cfg and cfg["measures"]: + self.ucfg.measures = cfg["measures"] + else: + self.ucfg.measures = list() + self.version_change_adapt_meas_config(self.ucfg.measures) + if "alarms" in cfg and cfg["alarms"]: + self.ucfg.alarms = cfg["alarms"] + else: + self.ucfg.alarms = list() + for i in range(0, len(self.ucfg.alarms)): + if "alias" in self.ucfg.alarms[i]: + self.ucfg.alarms[i]["name"] = self.ucfg.alarms[i]["alias"] + self.ucfg.alarms[i].pop("alias") + if "ctrlAlias" in self.ucfg.alarms[i]: + self.ucfg.alarms[i]["ctrlName"] = self.ucfg.alarms[i]["ctrlAlias"] + self.ucfg.alarms[i].pop("ctrlAlias") + if "measureAlias" in self.ucfg.alarms[i]: + self.ucfg.alarms[i]["measureName"] = self.ucfg.alarms[i]["measureAlias"] + self.ucfg.alarms[i].pop("measureAlias") + if "alarmLable" not in self.ucfg.alarms[i]: + self.ucfg.alarms[i]["alarmLable"] = "default" + + haveDefaultGroup = 0 + for i in range(0, len(self.ucfg.groups)): + if self.ucfg.groups[j]["name"] == "default": + haveDefaultGroup = 1 + self.ucfg.groups[j]["_id"] = "group59b64649c93" + if "LwTSDBSize" not in self.ucfg.groups[i]: + self.ucfg.groups[i]["LwTSDBSize"] = 1000 + if "strategy" not in self.ucfg.groups[i]: + self.ucfg.groups[i]["strategy"] = 1 + if not "historyDataMode" not in self.ucfg.groups[i]: + if "historyDataPath" not in self.ucfg.groups[i]: + if "historyDataPath" in self.ucfg.groups[i] and "/mnt/usb/" in self.ucfg.groups[i]["historyDataPath"]: + self.ucfg.groups[i]["historyDataMode"] = "usb" + else: + if "historyDataPath" in self.ucfg.groups[i] and "/mnt/sd/" in self.ucfg.groups[i]["historyDataPath"]: + self.ucfg.groups[i]["historyDataMode"] = "sd" + else: + 
self.ucfg.groups[i]["historyDataMode"] = "gateway" + self.ucfg.groups[i]["historyDataPath"] = "/var/user/data/dbhome/device_supervisor/LwTSDB" + if "enablePerOnchange" not in self.ucfg.groups[i]: + self.ucfg.groups[i]["enablePerOnchange"] = 0 + + if haveDefaultGroup == 0: + self.ucfg.groups.append({'name': '"default"', 'uploadInterval': 10, 'enablePerOnchange': 0, 'LwTSDBSize': 1000, + 'strategy': 1, 'historyDataMode': '"gateway"', '_id': '"group59b64649c93"', + 'historyDataPath': '"/var/user/data/dbhome/device_supervisor/LwTSDB"'}) + else: + if "misc" in cfg and cfg["misc"]: + self.ucfg.misc = cfg["misc"] + if "cacheMode" not in self.ucfg.misc or "cachePath" not in self.ucfg.misc: + if "cachePath" in self.ucfg.misc and "/mnt/usb/" in self.ucfg.misc["cachePath"]: + self.ucfg.misc["cacheMode"] = "usb" + elif "cachePath" in self.ucfg.misc and "/mnt/sd/" in self.ucfg.misc["cachePath"]: + self.ucfg.misc["cacheMode"] = "sd" + else: + self.ucfg.misc["cacheMode"] = "gateway" + self.ucfg.misc["cachePath"] = "/var/user/data/dbhome/device_supervisor/offlinedata" + if "cacheSize" not in self.ucfg.misc: + self.ucfg.misc["cacheSize"] = 10000 + elif "cacheUploadPeriod" not in self.ucfg.misc: + self.ucfg.misc["cacheUploadPeriod"] = 200 + if "cacheStrategy" not in self.ucfg.misc: + self.ucfg.misc["cacheStrategy"] = 0 + if "pubTimeout" not in self.ucfg.misc: + self.ucfg.misc["pubTimeout"] = 1000 + if "pubRepeatNum" not in self.ucfg.misc: + self.ucfg.misc["pubRepeatNum"] = 3 + if "debugLogMode" not in self.ucfg.misc or "debugLogPath" not in self.ucfg.misc: + if "debugLogPath" in self.ucfg.misc and "/mnt/usb/" in self.ucfg.misc["debugLogPath"]: + self.ucfg.misc["debugLogMode"] = "usb" + else: + if "debugLogPath" in self.ucfg.misc and "/mnt/sd/" in self.ucfg.misc["debugLogPath"]: + self.ucfg.misc["debugLogMode"] = "sd" + else: + self.ucfg.misc["debugLogMode"] = "gateway" + self.ucfg.misc["debugLogPath"] = "/var/user/data/dbhome/device_supervisor/debugLog" + if "debugLogSize" not in 
self.ucfg.misc: + self.ucfg.misc["debugLogSize"] = 2000 + if "logNum" not in self.ucfg.misc: + self.ucfg.misc["logNum"] = 2 + if "logSize" not in self.ucfg.misc: + self.ucfg.misc["logSize"] = 1 + else: + if self.mobiuspi.model_name in ('EC942', 'EC954'): + for com in self.ucfg.misc["coms"]: + if "mode" not in com: + com["mode"] = "rs485" + + else: + if self.mobiuspi.model_name in ('EC942', 'EC954'): + coms = [ + { + 'mode': '"rs485"', 'name': '"rs232"', + 'baud': 9600, 'bits': 8, 'stopbits': 1, + 'parityChk': '"n"'}, + { + 'mode': '"rs485"', 'name': '"rs485"', + 'baud': 9600, 'bits': 8, 'stopbits': 1, + 'parityChk': '"n"'}] + else: + coms = [ + { + 'name': '"rs232"', 'baud': 9600, 'bits': 8, + 'stopbits': 1, 'parityChk': '"n"'}, + { + 'name': '"rs485"', 'baud': 9600, 'bits': 8, + 'stopbits': 1, 'parityChk': '"n"'}] + self.ucfg.misc = { + 'maxAlarmRecordSz': 2000, 'logLvl': '"INFO"', 'logNum': 2, + 'logSize': 1, 'cacheSize': 10000, + 'cacheMode': '"gateway"', 'cachePath': '"/var/user/data/dbhome/device_supervisor/offlinedata"', + 'debugLogSize': 2000, 'debugLogMode': '"gateway"', + 'cacheUploadPeriod': 200, + 'debugLogPath': '"/var/user/data/dbhome/device_supervisor/debugLog"', + 'coms': coms} + if "clouds" in cfg: + if cfg["clouds"]: + self.ucfg.clouds = cfg["clouds"] + for j in range(0, len(self.ucfg.clouds)): + if self.ucfg.clouds[j]["name"] == "default": + self.ucfg.clouds[j]["_id"] = "cloud59b6464bd03" + + else: + cloud_default_config = {'cacheSize':10000, + 'cachePath':"/var/user/data/dbhome/device_supervisor/offlinedata", 'enable':0, + 'name':"default", 'type':StandardMQTT, 'cacheUploadPeriod':200, '_id':"cloud59b6464bd03", + 'args':{ + 'host': '""', 'port': 1883, 'clientId': '""', + 'auth': 0, 'tls': 0, 'cleanSession': 0, + 'mqttVersion': '"v3.1.1"', 'keepalive': 60, + 'key': '""', 'cert': '""', 'rootCA': '""', + 'tlsAuth': '"caSelfSigned"', 'verifyServer': 1, + 'username': '""', 'passwd': '""', + 'willQos': 0, 'willRetain': 0, 'willTopic': '""', + 
'willPayload': '""'}} + self.ucfg.clouds = list() + self.ucfg.clouds.append(cloud_default_config) + for cloud in self.ucfg.clouds: + if cloud["type"] == AzureIoT: + if cloud["args"]["authType"] == 1: + cloud["args"]["authType"] = "Symmetric key" + else: + if cloud["args"]["authType"] == 2: + cloud["args"]["authType"] = "X.509 Self-Signed" + else: + if cloud["args"]["authType"] == 3: + cloud["args"]["authType"] = "X.509 CA Signed" + else: + if cloud["type"] in [StandardMQTT, InspurYunzhou, HeilanV1_0, HeilanV2_0]: + if "willQos" not in cloud["args"]: + cloud["args"]["willQos"] = 0 + if "willRetain" not in cloud["args"]: + cloud["args"]["willRetain"] = 0 + if "willTopic" not in cloud["args"]: + cloud["args"]["willTopic"] = "" + if "willPayload" not in cloud["args"]: + cloud["args"]["willPayload"] = "" + if cloud["type"] in [HeilanV1_0]: + cloud["args"]["AlarmRecoveryUpload"] = 0 + if "tlsAuth" not in cloud["args"]: + cloud["args"]["tlsAuth"] = "caSelfSigned" + if "uploadRules" not in cloud: + cloud["uploadRules"] = list() + if "uploadRules" in cloud: + for i in range(0, len(cloud["uploadRules"])): + if "ctrlAlias" in cloud["uploadRules"][i]: + cloud["uploadRules"][i]["ctrlName"] = cloud["uploadRules"][i]["ctrlAlias"] + cloud["uploadRules"][i].pop("ctrlAlias") + if "measureAlias" in cloud["uploadRules"][i]: + cloud["uploadRules"][i]["measureName"] = cloud["uploadRules"][i]["measureAlias"] + cloud["uploadRules"][i].pop("measureAlias") + + if "mindspheres" in cfg and cfg["mindspheres"]: + self.ucfg.mindspheres = cfg["mindspheres"] + for i in range(0, len(self.ucfg.mindspheres)): + if self.ucfg.mindspheres[i]["name"] == "mindsphere": + self.ucfg.mindspheres[i]["_id"] = "mindsphereeafcdf" + + else: + mindspheres_default_config = {'name':"mindsphere", + 'enable':0, '_id':"mindsphereeafcdf", 'args':{ + 'hostEnvironment': '"eu1"', 'hostDomain': '"mindsphere.io"', + 'appName': '""', 'appVersion': '""', + 'clientId': '""', 'clientSecret': '""', + 'authType': '"tenant"', 
'hostTenant': '""', + 'userTenant': '""', + 'timeout': 10, 'statusTimeout': 300, + 'enableOfflinePut': 0}, + 'mindsphereputs':[]} + self.ucfg.mindspheres.clear() + self.ucfg.mindspheres.append(mindspheres_default_config) + if "quickfaas" in cfg and cfg["quickfaas"]: + self.ucfg.quickfaas = cfg["quickfaas"] + if "genericFuncs" not in self.ucfg.quickfaas: + self.ucfg.quickfaas["genericFuncs"] = list() + if "uploadFuncs" not in self.ucfg.quickfaas: + self.ucfg.quickfaas["uploadFuncs"] = list() + if "downloadFuncs" not in self.ucfg.quickfaas: + self.ucfg.quickfaas["downloadFuncs"] = list() + else: + self.ucfg.quickfaas = {'genericFuncs':[], 'uploadFuncs':[], 'downloadFuncs':[]} + for uploadFunc in self.ucfg.quickfaas["uploadFuncs"]: + if "hideOfflineData" not in uploadFunc: + uploadFunc["hideOfflineData"] = 0 + else: + cloud = self._find_ucfg_cloud(uploadFunc["cloudName"]) + if cloud is None: + self.ucfg.quickfaas["uploadFuncs"].remove(uploadFunc) + continue + if cloud["type"] == AzureIoT: + if uploadFunc["topic"][0] == "/": + uploadFunc["topic"] = self._update_topic_by_index(uploadFunc["topic"], 2, "{DeviceId}") + else: + uploadFunc["topic"] = self._update_topic_by_index(uploadFunc["topic"], 1, "{DeviceId}") + if cloud["type"] == AliyunIoT: + if uploadFunc["msgType"] == 0: + if uploadFunc["topic"][0] == "/": + uploadFunc["topic"] = self._update_topic_by_index(uploadFunc["topic"], 1, "{ProductKey}") + uploadFunc["topic"] = self._update_topic_by_index(uploadFunc["topic"], 2, "{DeviceName}") + else: + uploadFunc["topic"] = self._update_topic_by_index(uploadFunc["topic"], 0, "{ProductKey}") + uploadFunc["topic"] = self._update_topic_by_index(uploadFunc["topic"], 1, "{DeviceName}") + elif not uploadFunc["msgType"] == 1: + if uploadFunc["msgType"] == 2: + pass + if uploadFunc["topic"][0] == "/": + uploadFunc["topic"] = self._update_topic_by_index(uploadFunc["topic"], 2, "{ProductKey}") + uploadFunc["topic"] = self._update_topic_by_index(uploadFunc["topic"], 3, 
"{DeviceName}") + else: + uploadFunc["topic"] = self._update_topic_by_index(uploadFunc["topic"], 1, "{ProductKey}") + uploadFunc["topic"] = self._update_topic_by_index(uploadFunc["topic"], 2, "{DeviceName}") + + for downloadFunc in self.ucfg.quickfaas["downloadFuncs"]: + cloud = self._find_ucfg_cloud(downloadFunc["cloudName"]) + if cloud is None: + self.ucfg.quickfaas["downloadFuncs"].remove(downloadFunc) + continue + else: + if cloud["type"] == AzureIoT: + if downloadFunc["topic"][0] == "/": + downloadFunc["topic"] = self._update_topic_by_index(downloadFunc["topic"], 2, "{DeviceId}") + else: + downloadFunc["topic"] = self._update_topic_by_index(downloadFunc["topic"], 1, "{DeviceId}") + if cloud["type"] == AliyunIoT: + if downloadFunc["msgType"] == 0: + if downloadFunc["topic"][0] == "/": + downloadFunc["topic"] = self._update_topic_by_index(downloadFunc["topic"], 1, "{ProductKey}") + downloadFunc["topic"] = self._update_topic_by_index(downloadFunc["topic"], 2, "{DeviceName}") + else: + downloadFunc["topic"] = self._update_topic_by_index(downloadFunc["topic"], 0, "{ProductKey}") + downloadFunc["topic"] = self._update_topic_by_index(downloadFunc["topic"], 1, "{DeviceName}") + elif not (downloadFunc["msgType"] == 1 or downloadFunc["msgType"] == 2): + if downloadFunc["msgType"] == 3: + pass + if downloadFunc["topic"][0] == "/": + downloadFunc["topic"] = self._update_topic_by_index(downloadFunc["topic"], 2, "{ProductKey}") + downloadFunc["topic"] = self._update_topic_by_index(downloadFunc["topic"], 3, "{DeviceName}") + else: + downloadFunc["topic"] = self._update_topic_by_index(downloadFunc["topic"], 1, "{ProductKey}") + downloadFunc["topic"] = self._update_topic_by_index(downloadFunc["topic"], 2, "{DeviceName}") + + if "alarmLables" in cfg and cfg["alarmLables"]: + self.ucfg.alarmLables = cfg["alarmLables"] + else: + self.ucfg.alarmLables = list() + self.ucfg.alarmLables.append("default") + for i in range(0, len(self.ucfg.quickfaas["uploadFuncs"])): + if 
self.ucfg.quickfaas["uploadFuncs"][i]["trigger"] == "warning_event": + self.ucfg.quickfaas["uploadFuncs"][i]["alarms"].clear() + self.ucfg.quickfaas["uploadFuncs"][i]["alarms"].append("default") + + if "modbusSlave" in cfg: + if cfg["modbusSlave"]: + self.ucfg.modbusSlave = cfg["modbusSlave"] + if "useRawvalue" not in self.ucfg.modbusSlave: + self.ucfg.modbusSlave["useRawvalue"] = 1 + if "mapping_table" in self.ucfg.modbusSlave: + if self.ucfg.modbusSlave["mapping_table"]: + for i in range(0, len(self.ucfg.modbusSlave["mapping_table"])): + mea = self.find_mea_by_name(self.ucfg.modbusSlave["mapping_table"][i]["ctrlName"], self.ucfg.modbusSlave["mapping_table"][i]["measureName"]) + if mea: + if "northDataType" not in self.ucfg.modbusSlave["mapping_table"][i]: + if "enableBit" in self.ucfg.modbusSlave["mapping_table"][i]: + if self.ucfg.modbusSlave["mapping_table"][i]["enableBit"] == 0: + self.ucfg.modbusSlave["mapping_table"][i]["northDataType"] = mea["dataType"] + else: + self.ucfg.modbusSlave["mapping_table"][i]["northDataType"] = "BIT" + else: + self.ucfg.modbusSlave["mapping_table"][i]["northDataType"] = mea["dataType"] + if "ctrlAlias" in self.ucfg.modbusSlave["mapping_table"][i]: + self.ucfg.modbusSlave["mapping_table"][i]["ctrlName"] = self.ucfg.modbusSlave["mapping_table"][i]["ctrlAlias"] + self.ucfg.modbusSlave["mapping_table"][i].pop("ctrlAlias") + if "measureAlias" in self.ucfg.modbusSlave["mapping_table"][i]: + self.ucfg.modbusSlave["mapping_table"][i]["measureName"] = self.ucfg.modbusSlave["mapping_table"][i]["measureAlias"] + self.ucfg.modbusSlave["mapping_table"][i].pop("measureAlias") + if self.ucfg.modbusSlave["mapping_table"][i]["dataType"] in ('BYTE', + 'SINT', + 'WORD', + 'INT', + 'DWORD', + 'DINT', + 'BCD', + 'BCD32', + 'ULONG', + 'LONG') and "enableBit" not in self.ucfg.modbusSlave["mapping_table"][i]: + self.ucfg.modbusSlave["mapping_table"][i]["enableBit"] = 0 + + if "mappingTable" not in self.ucfg.modbusSlave: + 
self.ucfg.modbusSlave["mappingTable"] = [{'name':"1", + 'slaveAddr':1, 'measures':[], '_id':"modbusTCPSlave01"}] + if "mapping_table" in self.ucfg.modbusSlave: + self.ucfg.modbusSlave["mappingTable"][0] = {'name':str(self.ucfg.modbusSlave["slaveAddr"]), 'slaveAddr':int(self.ucfg.modbusSlave["slaveAddr"]), + 'measures':self.ucfg.modbusSlave["mapping_table"], + '_id':"modbusTCPSlave01"} + elif self.ucfg.modbusSlave["mappingTable"][0] and self.ucfg.modbusSlave["mappingTable"][0]["name"] == "1": + self.ucfg.modbusSlave["mappingTable"][0]["_id"] = "modbusTCPSlave01" + else: + self.ucfg.modbusSlave = {'enable':0, + 'protocol':"Modbus-TCP", 'port':502, 'slaveAddr':1, 'int16Ord':"ab", 'int32Ord':"abcd", + 'float32Ord':"abcd", 'maxConnection':5, 'useRawvalue':1, 'mappingTable':[ + {'name':"1", + 'slaveAddr':1, 'measures':[], '_id':"modbusTCPSlave01"}]} + if "modbusRTUSlave" in cfg: + if cfg["modbusRTUSlave"]: + self.ucfg.modbusRTUSlave = cfg["modbusRTUSlave"] + if "useRawvalue" not in self.ucfg.modbusRTUSlave: + self.ucfg.modbusRTUSlave["useRawvalue"] = 1 + if "mapping_table" in self.ucfg.modbusRTUSlave: + if self.ucfg.modbusRTUSlave["mapping_table"]: + for i in range(0, len(self.ucfg.modbusRTUSlave["mapping_table"])): + mea = self.find_mea_by_name(self.ucfg.modbusRTUSlave["mapping_table"][i]["ctrlName"], self.ucfg.modbusRTUSlave["mapping_table"][i]["measureName"]) + if mea: + if "northDataType" not in self.ucfg.modbusRTUSlave["mapping_table"][i]: + if "enableBit" in self.ucfg.modbusRTUSlave["mapping_table"][i]: + if self.ucfg.modbusRTUSlave["mapping_table"][i]["enableBit"] == 0: + self.ucfg.modbusRTUSlave["mapping_table"][i]["northDataType"] = mea["dataType"] + else: + self.ucfg.modbusRTUSlave["mapping_table"][i]["northDataType"] = "BIT" + else: + self.ucfg.modbusRTUSlave["mapping_table"][i]["northDataType"] = mea["dataType"] + if "ctrlAlias" in self.ucfg.modbusRTUSlave["mapping_table"][i]: + self.ucfg.modbusRTUSlave["mapping_table"][i]["ctrlName"] = 
self.ucfg.modbusRTUSlave["mapping_table"][i]["ctrlAlias"] + self.ucfg.modbusRTUSlave["mapping_table"][i].pop("ctrlAlias") + if "measureAlias" in self.ucfg.modbusRTUSlave["mapping_table"][i]: + self.ucfg.modbusRTUSlave["mapping_table"][i]["measureName"] = self.ucfg.modbusRTUSlave["mapping_table"][i]["measureAlias"] + self.ucfg.modbusRTUSlave["mapping_table"][i].pop("measureAlias") + if self.ucfg.modbusRTUSlave["mapping_table"][i]["dataType"] in ('BYTE', + 'SINT', + 'WORD', + 'INT', + 'DWORD', + 'DINT', + 'BCD', + 'BCD32', + 'ULONG', + 'LONG') and "enableBit" not in self.ucfg.modbusRTUSlave["mapping_table"][i]: + self.ucfg.modbusRTUSlave["mapping_table"][i]["enableBit"] = 0 + + if "mappingTable" not in self.ucfg.modbusRTUSlave: + self.ucfg.modbusRTUSlave["mappingTable"] = [{'name':"1", + 'slaveAddr':1, 'measures':[], '_id':"modbusRTUSlave01"}] + if "mapping_table" in self.ucfg.modbusRTUSlave: + self.ucfg.modbusRTUSlave["mappingTable"][0] = {'name':str(self.ucfg.modbusRTUSlave["slaveAddr"]), 'slaveAddr':int(self.ucfg.modbusRTUSlave["slaveAddr"]), + 'measures':self.ucfg.modbusRTUSlave["mapping_table"], + '_id':"modbusRTUSlave01"} + elif self.ucfg.modbusRTUSlave["mappingTable"][0] and self.ucfg.modbusRTUSlave["mappingTable"][0]["name"] == "1": + self.ucfg.modbusRTUSlave["mappingTable"][0]["_id"] = "modbusRTUSlave01" + else: + self.ucfg.modbusRTUSlave = {'enable':0, + 'protocol':"Modbus-RTU", 'coms':"rs485", 'slaveAddr':1, 'int16Ord':"ab", 'int32Ord':"abcd", + 'float32Ord':"abcd", 'mappingTable':[ + {'name':"1", + 'slaveAddr':1, 'measures':[], '_id':"modbusRTUSlave01"}]} + if "iec104Server" in cfg: + if cfg["iec104Server"]: + self.ucfg.iec104Server = cfg["iec104Server"] + if "useRawvalue" not in self.ucfg.iec104Server: + self.ucfg.iec104Server["useRawvalue"] = 1 + if "connectMode" not in self.ucfg.iec104Server: + self.ucfg.iec104Server["connectMode"] = "tcpServer" + if "enableSpontaneous" not in self.ucfg.iec104Server: + self.ucfg.iec104Server["enableSpontaneous"] = 1 + 
if "asduLen" not in self.ucfg.iec104Server: + self.ucfg.iec104Server["asduLen"] = 2 + if "mapping_table" in self.ucfg.iec104Server and self.ucfg.iec104Server["mapping_table"]: + for i in range(0, len(self.ucfg.iec104Server["mapping_table"])): + mea = self.find_mea_by_name(self.ucfg.iec104Server["mapping_table"][i]["ctrlName"], self.ucfg.iec104Server["mapping_table"][i]["measureName"]) + if mea: + if "northDataType" not in self.ucfg.iec104Server["mapping_table"][i]: + if "enableBit" in self.ucfg.iec104Server["mapping_table"][i]: + if self.ucfg.iec104Server["mapping_table"][i]["enableBit"] == 0: + self.ucfg.iec104Server["mapping_table"][i]["northDataType"] = mea["dataType"] + else: + self.ucfg.iec104Server["mapping_table"][i]["northDataType"] = "BIT" + else: + self.ucfg.iec104Server["mapping_table"][i]["northDataType"] = mea["dataType"] + if "ctrlAlias" in self.ucfg.iec104Server["mapping_table"][i]: + self.ucfg.iec104Server["mapping_table"][i]["ctrlName"] = self.ucfg.iec104Server["mapping_table"][i]["ctrlAlias"] + self.ucfg.iec104Server["mapping_table"][i].pop("ctrlAlias") + if "measureAlias" in self.ucfg.iec104Server["mapping_table"][i]: + self.ucfg.iec104Server["mapping_table"][i]["measureName"] = self.ucfg.iec104Server["mapping_table"][i]["measureAlias"] + self.ucfg.iec104Server["mapping_table"][i].pop("measureAlias") + if self.ucfg.iec104Server["mapping_table"][i]["dataType"] in ('BYTE', + 'SINT', + 'WORD', + 'INT', + 'DWORD', + 'DINT', + 'BCD', + 'BCD32', + 'ULONG', + 'LONG') and "enableBit" not in self.ucfg.iec104Server["mapping_table"][i]: + self.ucfg.iec104Server["mapping_table"][i]["enableBit"] = 0 + + else: + self.ucfg.iec104Server["mapping_table"] = [] + else: + self.ucfg.iec104Server = {'enable':0, + 'cotSize':2, 'asduLen':2, 'port':2404, 'serverList':[{"asduAddr": 1}], 'kValue':12, 'wValue':8, + 't0':30, 't1':15, 't2':10, 't3':20, 'maximumLink':5, 'timeSet':1, 'byteOrder':"abcd", + 'useRawvalue':1, 'connectMode':"tcpServer", 'enableSpontaneous':1, 
'mapping_table':[]} + if "iec101Server" in cfg: + if cfg["iec101Server"]: + self.ucfg.iec101Server = cfg["iec101Server"] + if "useRawvalue" not in self.ucfg.iec101Server: + self.ucfg.iec101Server["useRawvalue"] = 1 + elif "enableSpontaneous" not in self.ucfg.iec101Server: + self.ucfg.iec101Server["enableSpontaneous"] = 1 + elif "protocolMode" not in self.ucfg.iec101Server: + self.ucfg.iec101Server["protocolMode"] = 0 + if "mappingTable" not in self.ucfg.iec101Server: + if "mapping_table" in self.ucfg.iec101Server: + if isinstance(self.ucfg.iec101Server["mapping_table"], list): + self.ucfg.iec101Server["mappingTable"] = {'YX':[], 'YC':[], 'YK':[]} + if isinstance(self.ucfg.iec101Server["mapping_table"], dict): + self.ucfg.iec101Server["mappingTable"] = self.ucfg.iec101Server["mapping_table"] + else: + self.ucfg.iec101Server["mappingTable"] = {'YX':[], 'YC':[], 'YK':[]} + for yx in self.ucfg.iec101Server["mappingTable"]["YX"]: + mea = self.find_mea_by_name(yx["ctrlName"], yx["measureName"]) + if mea and "northDataType" not in yx: + if "enableBit" in yx: + if yx["enableBit"] == 0: + yx["northDataType"] = mea["dataType"] + else: + yx["northDataType"] = "BIT" + else: + yx["northDataType"] = mea["dataType"] + + for yc in self.ucfg.iec101Server["mappingTable"]["YC"]: + mea = self.find_mea_by_name(yc["ctrlName"], yc["measureName"]) + if mea and "northDataType" not in yc: + if "enableBit" in yc: + if yc["enableBit"] == 0: + yc["northDataType"] = mea["dataType"] + else: + yc["northDataType"] = "BIT" + else: + yc["northDataType"] = mea["dataType"] + + for yk in self.ucfg.iec101Server["mappingTable"]["YK"]: + mea = self.find_mea_by_name(yk["ctrlName"], yk["measureName"]) + if mea and "northDataType" not in yk: + if "enableBit" in yk: + if yk["enableBit"] == 0: + yk["northDataType"] = mea["dataType"] + else: + yk["northDataType"] = "BIT" + else: + yk["northDataType"] = mea["dataType"] + + else: + self.ucfg.iec101Server = {'enable':0, + 'coms':"rs485", 'mode':"UnBalance", 
'protocolMode':0, 'linkLen':2, 'linkAddr':1, 'asduLen':2, + 'ioaLen':3, 'cotLen':2, 'serverList':[{"asduAddr": 1}], 'linkTimeOut':2000, 'timeSet':1, 'idleTimeOut':10000, + 'byteOrder':"abcd", 'useRawvalue':1, 'enableSpontaneous':1, 'mappingTable':{'YX':[], 'YC':[], 'YK':[]}} + if "iec104Client" in cfg: + if cfg["iec104Client"]: + self.ucfg.iec104Client = cfg["iec104Client"] + if "mapping_table" not in self.ucfg.iec104Client: + self.ucfg.iec104Client["mapping_table"] = {'YX':[], 'YC':[], 'YK':[]} + for yx in self.ucfg.iec104Client["mapping_table"]["YX"]: + if "readWrite" not in yx: + yx["readWrite"] = find_measure_and_return_readWrite(yx, self.ucfg.measures) + if "ctrlAlias" in yx: + yx["ctrlName"] = yx["ctrlAlias"] + yx.pop("ctrlAlias") + if "measureAlias" in yx: + yx["measureName"] = yx["measureAlias"] + yx.pop("measureAlias") + if yx["dataType"] in ('BYTE', 'SINT', 'WORD', 'INT', 'DWORD', + 'DINT', 'BCD', 'BCD32', 'ULONG', + 'LONG') and "enableBit" not in yx: + yx["enableBit"] = 0 + + for yc in self.ucfg.iec104Client["mapping_table"]["YC"]: + if "readWrite" not in yc: + yc["readWrite"] = find_measure_and_return_readWrite(yc, self.ucfg.measures) + if "ctrlAlias" in yc: + yc["ctrlName"] = yc["ctrlAlias"] + yc.pop("ctrlAlias") + if "measureAlias" in yc: + yc["measureName"] = yc["measureAlias"] + yc.pop("measureAlias") + if yc["dataType"] in ('BYTE', 'SINT', 'WORD', 'INT', 'DWORD', + 'DINT', 'BCD', 'BCD32', 'ULONG', + 'LONG') and "enableBit" not in yc: + yc["enableBit"] = 0 + + for yk in self.ucfg.iec104Client["mapping_table"]["YK"]: + if "readWrite" not in yk: + yk["readWrite"] = find_measure_and_return_readWrite(yk, self.ucfg.measures) + if "ctrlAlias" in yk: + yk["ctrlName"] = yk["ctrlAlias"] + yk.pop("ctrlAlias") + if "measureAlias" in yk: + yk["measureName"] = yk["measureAlias"] + yk.pop("measureAlias") + if yk["dataType"] in ('BYTE', 'SINT', 'WORD', 'INT', 'DWORD', + 'DINT', 'BCD', 'BCD32', 'ULONG', + 'LONG') and "enableBit" not in yk: + yk["enableBit"] = 0 + + 
else: + self.ucfg.iec104Client = {'enable':0, + 'connectType':2, 'serverAddr':"ipower.inhandcloud.cn", 'serverPort':2406, 'communicationCode':"", + 'protocol':1, 'asduAddr':1, 'tls':1, 'verification':1, 'mapping_table':{'YX':[], 'YC':[], 'YK':[]}} + if "opcuaServer" in cfg and cfg["opcuaServer"]: + self.ucfg.opcuaServer = cfg["opcuaServer"] + self.ucfg.opcuaServer["securityMode"] = 0 + if "certificate" not in self.ucfg.opcuaServer: + self.ucfg.opcuaServer["certificate"] = "None" + if "privateKey" not in self.ucfg.opcuaServer: + self.ucfg.opcuaServer["privateKey"] = "None" + if "pubsub" not in self.ucfg.opcuaServer: + self.ucfg.opcuaServer["pubsub"] = 0 + if "useRawvalue" not in self.ucfg.opcuaServer: + self.ucfg.opcuaServer["useRawvalue"] = 1 + if "mapping_table" in self.ucfg.opcuaServer and self.ucfg.opcuaServer["mapping_table"]: + for i in range(0, len(self.ucfg.opcuaServer["mapping_table"])): + mea = self.find_mea_by_name(self.ucfg.opcuaServer["mapping_table"][i]["ctrlName"], self.ucfg.opcuaServer["mapping_table"][i]["measureName"]) + if mea: + if "northDataType" not in self.ucfg.opcuaServer["mapping_table"][i]: + if "enableBit" in self.ucfg.opcuaServer["mapping_table"][i]: + if self.ucfg.opcuaServer["mapping_table"][i]["enableBit"] == 0: + self.ucfg.opcuaServer["mapping_table"][i]["northDataType"] = mea["dataType"] + else: + self.ucfg.opcuaServer["mapping_table"][i]["northDataType"] = "BIT" + else: + self.ucfg.opcuaServer["mapping_table"][i]["northDataType"] = mea["dataType"] + if "ctrlAlias" in self.ucfg.opcuaServer["mapping_table"][i]: + self.ucfg.opcuaServer["mapping_table"][i]["ctrlName"] = self.ucfg.opcuaServer["mapping_table"][i]["ctrlAlias"] + self.ucfg.opcuaServer["mapping_table"][i].pop("ctrlAlias") + if "measureAlias" in self.ucfg.opcuaServer["mapping_table"][i]: + self.ucfg.opcuaServer["mapping_table"][i]["measureName"] = self.ucfg.opcuaServer["mapping_table"][i]["measureAlias"] + self.ucfg.opcuaServer["mapping_table"][i].pop("measureAlias") + if 
self.ucfg.opcuaServer["mapping_table"][i]["dataType"] in ('BYTE', + 'SINT', + 'WORD', + 'INT', + 'DWORD', + 'DINT', + 'BCD', + 'BCD32', + 'ULONG', + 'LONG') and "enableBit" not in self.ucfg.opcuaServer["mapping_table"][i]: + self.ucfg.opcuaServer["mapping_table"][i]["enableBit"] = 0 + + else: + self.ucfg.opcuaServer["mapping_table"] = [] + else: + pass + self.ucfg.opcuaServer = {'enable':0, + 'port':4840, 'maximumLink':5, 'securityMode':0, 'certificate':"None", 'privateKey':"None", + 'identifierType':"String", 'useRawvalue':1, 'mapping_table':[]} + if "sl651Slave" in cfg and cfg["sl651Slave"]: + self.ucfg.sl651Slave = cfg["sl651Slave"] + if "useRawvalue" not in self.ucfg.sl651Slave: + self.ucfg.sl651Slave["useRawvalue"] = 1 + if "platform_list" in self.ucfg.sl651Slave: + if self.ucfg.sl651Slave["platform_list"]: + for platform in self.ucfg.sl651Slave["platform_list"]: + if "enableQuarterUpload" not in platform: + platform["enableQuarterUpload"] = 0 + + if "mapping_table" in self.ucfg.sl651Slave: + if self.ucfg.sl651Slave["mapping_table"]: + for i in range(0, len(self.ucfg.sl651Slave["mapping_table"])): + mea = self.find_mea_by_name(self.ucfg.sl651Slave["mapping_table"][i]["ctrlName"], self.ucfg.sl651Slave["mapping_table"][i]["measureName"]) + if mea: + if "northDataType" not in self.ucfg.sl651Slave["mapping_table"][i]: + if "enableBit" in self.ucfg.sl651Slave["mapping_table"][i]: + if self.ucfg.sl651Slave["mapping_table"][i]["enableBit"] == 0: + self.ucfg.sl651Slave["mapping_table"][i]["northDataType"] = mea["dataType"] + else: + self.ucfg.sl651Slave["mapping_table"][i]["northDataType"] = "BIT" + else: + self.ucfg.sl651Slave["mapping_table"][i]["northDataType"] = mea["dataType"] + if "ctrlAlias" in self.ucfg.sl651Slave["mapping_table"][i]: + self.ucfg.sl651Slave["mapping_table"][i]["ctrlName"] = self.ucfg.sl651Slave["mapping_table"][i]["ctrlAlias"] + self.ucfg.sl651Slave["mapping_table"][i].pop("ctrlAlias") + if "measureAlias" in 
self.ucfg.sl651Slave["mapping_table"][i]: + self.ucfg.sl651Slave["mapping_table"][i]["measureName"] = self.ucfg.sl651Slave["mapping_table"][i]["measureAlias"] + self.ucfg.sl651Slave["mapping_table"][i].pop("measureAlias") + if self.ucfg.sl651Slave["mapping_table"][i]["dataType"] in ('BYTE', + 'SINT', + 'WORD', + 'INT', + 'DWORD', + 'DINT', + 'BCD', + 'ULONG', + 'LONG') and "enableBit" not in self.ucfg.sl651Slave["mapping_table"][i]: + self.ucfg.sl651Slave["mapping_table"][i]["enableBit"] = 0 + + else: + self.ucfg.sl651Slave["mapping_table"] = [] + else: + self.ucfg.sl651Slave = {'enable':0, + 'centerAaddr':1, 'remoteAddr':"", 'addrCode':"", 'password':"", 'platform_list':[], 'useRawvalue':1, + 'mapping_table':[]} + if "hj212Client" in cfg and cfg["hj212Client"]: + self.ucfg.hj212Client = cfg["hj212Client"] + if "useRawvalue" not in self.ucfg.hj212Client: + self.ucfg.hj212Client["useRawvalue"] = 1 + if "mapping_table" not in self.ucfg.hj212Client: + self.ucfg.hj212Client["mapping_table"] = [] + for i in range(0, len(self.ucfg.hj212Client["mapping_table"])): + mea = self.find_mea_by_name(self.ucfg.hj212Client["mapping_table"][i]["ctrlName"], self.ucfg.hj212Client["mapping_table"][i]["measureName"]) + if mea and "northDataType" not in self.ucfg.hj212Client["mapping_table"][i]: + if "enableBit" in self.ucfg.hj212Client["mapping_table"][i]: + if self.ucfg.hj212Client["mapping_table"][i]["enableBit"] == 0: + self.ucfg.hj212Client["mapping_table"][i]["northDataType"] = mea["dataType"] + else: + self.ucfg.hj212Client["mapping_table"][i]["northDataType"] = "BIT" + else: + self.ucfg.hj212Client["mapping_table"][i]["northDataType"] = mea["dataType"] + + else: + self.ucfg.hj212Client = {'enable':0, + 'useRawvalue':1, 'platform_list':[], 'block_list':[], 'mapping_table':[]} + if "bacnetServer" in cfg and cfg["bacnetServer"]: + self.ucfg.bacnetServer = cfg["bacnetServer"] + if "useRawvalue" not in self.ucfg.bacnetServer: + self.ucfg.bacnetServer["useRawvalue"] = 1 + if 
"mapping_table" not in self.ucfg.bacnetServer: + self.ucfg.bacnetServer["mapping_table"] = [] + for i in range(0, len(self.ucfg.bacnetServer["mapping_table"])): + mea = self.find_mea_by_name(self.ucfg.bacnetServer["mapping_table"][i]["ctrlName"], self.ucfg.bacnetServer["mapping_table"][i]["measureName"]) + if mea and "northDataType" not in self.ucfg.bacnetServer["mapping_table"][i]: + if "enableBit" in self.ucfg.bacnetServer["mapping_table"][i]: + if self.ucfg.bacnetServer["mapping_table"][i]["enableBit"] == 0: + self.ucfg.bacnetServer["mapping_table"][i]["northDataType"] = mea["dataType"] + else: + self.ucfg.bacnetServer["mapping_table"][i]["northDataType"] = "BIT" + else: + self.ucfg.bacnetServer["mapping_table"][i]["northDataType"] = mea["dataType"] + + else: + self.ucfg.bacnetServer = {'enable':0, + 'protocol':"BACnet/IP", 'deviceId':0, 'port':47808, 'bbmdEnable':0, + 'useRawvalue':1, 'mapping_table':[]} + if "Dnp3Server" in cfg and cfg["Dnp3Server"]: + self.ucfg.Dnp3Server = cfg["Dnp3Server"] + if "useRawvalue" not in self.ucfg.Dnp3Server: + self.ucfg.Dnp3Server["useRawvalue"] = 1 + if "mapping_table" not in self.ucfg.Dnp3Server: + self.ucfg.Dnp3Server["mapping_table"] = [] + for i in range(0, len(self.ucfg.Dnp3Server["mapping_table"])): + mea = self.find_mea_by_name(self.ucfg.Dnp3Server["mapping_table"][i]["ctrlName"], self.ucfg.Dnp3Server["mapping_table"][i]["measureName"]) + if mea and "northDataType" not in self.ucfg.Dnp3Server["mapping_table"][i]: + if "enableBit" in self.ucfg.Dnp3Server["mapping_table"][i]: + if self.ucfg.Dnp3Server["mapping_table"][i]["enableBit"] == 0: + self.ucfg.Dnp3Server["mapping_table"][i]["northDataType"] = mea["dataType"] + else: + self.ucfg.Dnp3Server["mapping_table"][i]["northDataType"] = "BIT" + else: + self.ucfg.Dnp3Server["mapping_table"][i]["northDataType"] = mea["dataType"] + + else: + self.ucfg.Dnp3Server = {'enable':0, + 'protocol':"Dnp3-TCP", 'slaveAddr':1, 'masterAddr':2, 'port':20000, 'useRawvalue':1, + 
'enableUnsol':0, 'maxFrasize':4096, 'layerTimeout':1000, 'linkRetry':5, 'enableLink':0, + 'mapping_table':[]} + if "iec61850Server" in cfg and cfg["iec61850Server"]: + self.ucfg.iec61850Server = cfg["iec61850Server"] + else: + self.ucfg.iec61850Server = {'enable':0, + 'protocol':"iec61850Server", 'port':102, 'iedName':"INHAND", 'LDName':"Gateway", 'ctrlMode':1, + 'authentication':0, 'password':"123456", 'useRawvalue':1, 'dataSet':[], 'mapping_table':[]} + if "snmpAgent" in cfg and cfg["snmpAgent"]: + self.ucfg.snmpAgent = cfg["snmpAgent"] + else: + self.ucfg.snmpAgent = {'enable':0, + 'port':161, 'useRawvalue':1, 'version':3, 'userName':"", 'enableAuth':0, + 'readWrite':"ro", 'enable_trap':0, 'mapping_table':[]} + if "southMetadata" in cfg and cfg["southMetadata"]: + self.ucfg.southMetadata = cfg["southMetadata"] + else: + self.ucfg.southMetadata = dict() + if "bindMetadata" in cfg and cfg["bindMetadata"]: + self.ucfg.bindMetadata = cfg["bindMetadata"] + else: + self.ucfg.bindMetadata = dict() + if "bindConfig" in cfg and cfg["bindConfig"]: + self.ucfg.bindConfig = cfg["bindConfig"] + else: + self.ucfg.bindConfig = {'enable':0, + 'bind':{ + 'modelId': '""', 'modelName': '""', 'srcId': '""', + 'srcName': '""', 'devId': '""', 'devName': '""'}, + 'varGroups':[], 'variables':[], 'alerts':[]} + if "templates" in cfg and cfg["templates"]: + self.ucfg.templates = cfg["templates"] + else: + self.ucfg.templates = dict() + for ctrlName, temp in self.ucfg.templates.items(): + self.version_change_adapt_ctrl_config(temp["controller"]) + self.version_change_adapt_meas_config(temp["measures"]) + self.ucfg.templates[ctrlName] = temp + + self.ucfg.version = self._get_app_version() + self.filename = self.app_base_path + "/cfg/device_supervisor/device_supervisor.cfg" + self.dump_config_file() + + def dump_config_file(self): + """ + :param self.filename: + :return: + """ + try: + try: + with open((self.filename), "r", encoding="utf-8") as fp: + flash_config = json.load(fp) + except 
def merge_patch_rfc7396(target, patch):
    """Apply a JSON Merge Patch (RFC 7396) to *target* and return the result.

    Semantics per RFC 7396:
      - A non-object *patch* replaces *target* wholesale.
      - A ``None`` member value removes that member from *target*.
      - Object members are merged recursively.

    NOTE: *target* is modified in place when it is a dict; callers that need
    the original should pass a copy.

    :param target: current configuration value (any JSON-compatible type)
    :param patch: merge patch to apply (any JSON-compatible type)
    :return: the merged value
    """
    if not isinstance(patch, dict):
        # RFC 7396: a non-object patch replaces the target entirely.
        return patch
    if not isinstance(target, dict):
        # Merging an object patch into a non-object discards the target.
        target = dict()
    for key, value in patch.items():
        if value is None:
            # null deletes the member; a no-op when it is absent.
            target.pop(key, None)
        else:
            if key not in target:
                target[key] = dict()
            # BUG FIX (decompiler artifact): the decompiled code recursed only
            # when the key already existed, leaving new keys as empty dicts and
            # failing this module's own __main__ assertions. RFC 7396 requires
            # the merge to happen unconditionally.
            target[key] = merge_patch_rfc7396(target[key], value)
    return target


def merge_patch_rfc7396_save_null(target, patch):
    """Apply RFC 7396 merge semantics but KEEP members whose value is None.

    Identical to :func:`merge_patch_rfc7396` except that ``None`` values are
    stored instead of deleting the member.

    :param target: current configuration value
    :param patch: merge patch to apply
    :return: the merged value
    """
    if not isinstance(patch, dict):
        return patch
    if not isinstance(target, dict):
        target = dict()
    for key, value in patch.items():
        if key not in target:
            target[key] = dict()
        target[key] = merge_patch_rfc7396_save_null(target[key], value)
    return target


if __name__ == "__main__":
    # Test vectors taken from RFC 7396 Appendix A.
    assert merge_patch_rfc7396({"a": "b"}, {"a": "c"}) == {"a": "c"}
    assert merge_patch_rfc7396({"a": "b"}, {"b": "c"}) == {'a': "b", 'b': "c"}
    assert merge_patch_rfc7396({"a": "b"}, {"a": None}) == {}
    assert merge_patch_rfc7396({"c": "b"}, {"a": None}) == {"c": "b"}
    assert merge_patch_rfc7396({'a': "b", 'b': "c"}, {"a": None}) == {"b": "c"}
    assert merge_patch_rfc7396({"a": ["b"]}, {"a": "c"}) == {"a": "c"}
    assert merge_patch_rfc7396({"a": "c"}, {"a": ["b"]}) == {"a": ["b"]}
    assert merge_patch_rfc7396({"a": {"b": "c"}}, {"a": {'b': "d", 'c': None}}) == {"a": {"b": "d"}}
    assert merge_patch_rfc7396({"a": [{"b": "c"}]}, {"a": [1]}) == {"a": [1]}
    assert merge_patch_rfc7396(["a", "b"], ["c", "d"]) == ["c", "d"]
    assert merge_patch_rfc7396({"a": "b"}, ["c"]) == ["c"]
    assert merge_patch_rfc7396({"a": "foo"}, None) is None
    assert merge_patch_rfc7396({"a": "foo"}, "bar") == "bar"
    assert merge_patch_rfc7396({"e": None}, {"a": 1}) == {'e': None, 'a': 1}
    assert merge_patch_rfc7396([1, 2], {'a': "b", 'c': None}) == {"a": "b"}
    assert merge_patch_rfc7396({}, {"a": {"bb": {"ccc": None}}}) == {"a": {"bb": {}}}
def simple_config_generate_array_key(index):
    """Generate a pseudo-unique 16-hex-char key for an array element.

    Layout: 4 hex chars from *index*, 8 from the current epoch second,
    4 random — collision-resistant enough for config-array ids.
    """
    return "%04x%08x%04x" % (int(index) % 65535, int(time.time()), random.randint(0, 65535))


class SimpleConfigIndex(object):
    """Flat lookup table mapping a (typ, key) pair to a value.

    The upper business layer must guarantee that each typ + key combination
    is unique.
    """

    def __init__(self):
        self.index = dict()

    def clear(self):
        """Drop every entry."""
        self.index.clear()

    def add_index(self, typ, key, value):
        """Store *value* under the composite "typ_key" entry."""
        self.index["%s_%s" % (typ, key)] = value

    def del_index(self, typ, key):
        """Remove an entry; raises KeyError when it does not exist."""
        del self.index["%s_%s" % (typ, key)]

    def get(self, typ, key):
        """Return the stored value, or None when the entry is absent."""
        return self.index.get("%s_%s" % (typ, key))


def simple_config_map_array(from_object, to_object, attr, db_index, key):
    """Map a JSON array of objects into ``to_object[attr]`` as an OrderedDict.

    Each element is keyed by its "_id" (generated and written back when
    missing) and registered in *db_index* under ``typ=attr``, ``key=element[key]``.
    Only the top-level array is mapped; nested arrays are left untouched.
    """
    mapped = collections.OrderedDict()
    for position, element in enumerate(from_object):
        if "_id" not in element:
            element["_id"] = simple_config_generate_array_key(position)
        mapped[element["_id"]] = element
        db_index.add_index(typ=attr, key=(element[key]), value=(element["_id"]))
    to_object[attr] = mapped


def simple_config_revert_map_arry(from_object, to_object, attr):
    """Revert ``from_object[attr]`` (dict of objects) back into the list
    *to_object*, deep-copying every element. Top level only, no nesting.
    """
    to_object.clear()
    to_object.extend(copy.deepcopy(item) for item in from_object[attr].values())
len(user_config.measures)): + mea = user_config.measures[i] + con_key = db_index.get("controllers", mea["ctrlName"]) + if not con_key: + print("There is not any controller named %s" % mea["ctrlName"]) + continue + elif mea["ctrlName"] not in simple_config["measures"]: + simple_config["measures"][mea["ctrlName"]] = collections.OrderedDict() + if "_id" in mea: + mea_key = mea["_id"] + else: + mea_key = simple_config_generate_array_key(i) + mea["_id"] = mea_key + simple_config["measures"][mea["ctrlName"]][mea_key] = mea + db_index.add_index(typ="measures", key=(mea["name"]), value=mea_key) + + simple_config_map_array(user_config.groups, simple_config, "groups", db_index, "name") + simple_config["alarmLables"] = user_config.alarmLables + simple_config_map_array(user_config.alarms, simple_config, "alarms", db_index, "name") + simple_config_map_array(user_config.labels, simple_config, "labels", db_index, "key") + simple_config_map_array(user_config.clouds, simple_config, "clouds", db_index, "name") + simple_config_map_array(user_config.mindspheres, simple_config, "mindspheres", db_index, "name") + simple_config["modbusSlave"] = dict() + modbus_slave = copy.deepcopy(user_config.modbusSlave) + if "mappingTable" in modbus_slave: + del modbus_slave["mappingTable"] + simple_config["modbusSlave"]["config"] = modbus_slave + if "mappingTable" not in simple_config["modbusSlave"]: + simple_config["modbusSlave"]["mappingTable"] = collections.OrderedDict() + for i in range(0, len(user_config.modbusSlave["mappingTable"])): + meas = user_config.modbusSlave["mappingTable"][i] + if "_id" in meas: + meas_key = meas["_id"] + else: + meas_key = simple_config_generate_array_key(i) + meas["_id"] = meas_key + simple_config["modbusSlave"]["mappingTable"][meas_key] = {} + simple_config["modbusSlave"]["mappingTable"][meas_key]["_id"] = meas_key + simple_config["modbusSlave"]["mappingTable"][meas_key]["name"] = meas["name"] + simple_config["modbusSlave"]["mappingTable"][meas_key]["slaveAddr"] = 
meas["slaveAddr"] + simple_config["modbusSlave"]["mappingTable"][meas_key]["measures"] = collections.OrderedDict() + db_index.add_index(typ="modbusSlave", key=(meas["name"]), value=meas_key) + for j in range(0, len(user_config.modbusSlave["mappingTable"][i]["measures"])): + mea = user_config.modbusSlave["mappingTable"][i]["measures"][j] + if "_id" in mea: + mea_key = mea["_id"] + else: + mea_key = simple_config_generate_array_key(j) + mea["_id"] = mea_key + simple_config["modbusSlave"]["mappingTable"][meas_key]["measures"][mea_key] = mea + db_index.add_index(typ="modbusSlaveMappingTable", key=(mea["measureName"]), value=mea_key) + + simple_config["modbusRTUSlave"] = dict() + modbus_slave = copy.deepcopy(user_config.modbusRTUSlave) + if "mappingTable" in modbus_slave: + del modbus_slave["mappingTable"] + simple_config["modbusRTUSlave"]["config"] = modbus_slave + if "mappingTable" not in simple_config["modbusRTUSlave"]: + simple_config["modbusRTUSlave"]["mappingTable"] = collections.OrderedDict() + for i in range(0, len(user_config.modbusRTUSlave["mappingTable"])): + meas = user_config.modbusRTUSlave["mappingTable"][i] + if "_id" in meas: + meas_key = meas["_id"] + else: + meas_key = simple_config_generate_array_key(i) + meas["_id"] = meas_key + simple_config["modbusRTUSlave"]["mappingTable"][meas_key] = {} + simple_config["modbusRTUSlave"]["mappingTable"][meas_key]["_id"] = meas_key + simple_config["modbusRTUSlave"]["mappingTable"][meas_key]["name"] = meas["name"] + simple_config["modbusRTUSlave"]["mappingTable"][meas_key]["slaveAddr"] = meas["slaveAddr"] + simple_config["modbusRTUSlave"]["mappingTable"][meas_key]["measures"] = collections.OrderedDict() + db_index.add_index(typ="modbusRTUSlave", key=(meas["name"]), value=meas_key) + for j in range(0, len(user_config.modbusRTUSlave["mappingTable"][i]["measures"])): + mea = user_config.modbusRTUSlave["mappingTable"][i]["measures"][j] + if "_id" in mea: + mea_key = mea["_id"] + else: + mea_key = 
simple_config_generate_array_key(j) + mea["_id"] = mea_key + simple_config["modbusRTUSlave"]["mappingTable"][meas_key]["measures"][mea_key] = mea + db_index.add_index(typ="modbusRTUSlaveMappingTable", key=(mea["measureName"]), value=mea_key) + + simple_config["iec104Server"] = dict() + iec104_server = copy.deepcopy(user_config.iec104Server) + if "mapping_table" in iec104_server: + del iec104_server["mapping_table"] + simple_config["iec104Server"]["config"] = iec104_server + if "mapping_table" not in simple_config["iec104Server"]: + simple_config["iec104Server"]["mapping_table"] = collections.OrderedDict() + for i in range(0, len(user_config.iec104Server["mapping_table"])): + mea = user_config.iec104Server["mapping_table"][i] + if "_id" in mea: + mea_key = mea["_id"] + else: + mea_key = simple_config_generate_array_key(i) + mea["_id"] = mea_key + simple_config["iec104Server"]["mapping_table"][mea_key] = mea + db_index.add_index(typ="iec104Server", key=(mea["measureName"]), value=mea_key) + + simple_config["iec101Server"] = dict() + iec101_server = copy.deepcopy(user_config.iec101Server) + if "mappingTable" in iec101_server: + del iec101_server["mappingTable"] + simple_config["iec101Server"]["config"] = iec101_server + simple_config["iec101Server"]["mappingTable"] = dict() + simple_config["iec101Server"]["mappingTable"]["YX"] = collections.OrderedDict() + simple_config["iec101Server"]["mappingTable"]["YC"] = collections.OrderedDict() + simple_config["iec101Server"]["mappingTable"]["YK"] = collections.OrderedDict() + for i in range(0, len(user_config.iec101Server["mappingTable"]["YX"])): + item = user_config.iec101Server["mappingTable"]["YX"][i] + if "_id" in item: + item_key = item["_id"] + else: + item_key = simple_config_generate_array_key(i) + item["_id"] = item_key + simple_config["iec101Server"]["mappingTable"]["YX"][item_key] = item + db_index.add_index(typ="iec101Server", key=(item["measureName"]), value=item_key) + + for i in range(0, 
len(user_config.iec101Server["mappingTable"]["YC"])): + item = user_config.iec101Server["mappingTable"]["YC"][i] + if "_id" in item: + item_key = item["_id"] + else: + item_key = simple_config_generate_array_key(i) + item["_id"] = item_key + simple_config["iec101Server"]["mappingTable"]["YC"][item_key] = item + db_index.add_index(typ="iec101Server", key=(item["measureName"]), value=item_key) + + for i in range(0, len(user_config.iec101Server["mappingTable"]["YK"])): + item = user_config.iec101Server["mappingTable"]["YK"][i] + if "_id" in item: + item_key = item["_id"] + else: + item_key = simple_config_generate_array_key(i) + item["_id"] = item_key + simple_config["iec101Server"]["mappingTable"]["YK"][item_key] = item + db_index.add_index(typ="iec101Server", key=(item["measureName"]), value=item_key) + + simple_config["iec104Client"] = dict() + iec104Client = copy.deepcopy(user_config.iec104Client) + if "mapping_table" in iec104Client: + del iec104Client["mapping_table"] + simple_config["iec104Client"]["config"] = iec104Client + simple_config["iec104Client"]["mapping_table"] = dict() + simple_config["iec104Client"]["mapping_table"]["YX"] = collections.OrderedDict() + simple_config["iec104Client"]["mapping_table"]["YC"] = collections.OrderedDict() + simple_config["iec104Client"]["mapping_table"]["YK"] = collections.OrderedDict() + for i in range(0, len(user_config.iec104Client["mapping_table"]["YX"])): + item = user_config.iec104Client["mapping_table"]["YX"][i] + if "_id" in item: + item_key = item["_id"] + else: + item_key = simple_config_generate_array_key(i) + item["_id"] = item_key + simple_config["iec104Client"]["mapping_table"]["YX"][item_key] = item + db_index.add_index(typ="iec104Client", key=(item["measureName"]), value=item_key) + + for i in range(0, len(user_config.iec104Client["mapping_table"]["YC"])): + item = user_config.iec104Client["mapping_table"]["YC"][i] + if "_id" in item: + item_key = item["_id"] + else: + item_key = 
simple_config_generate_array_key(i) + item["_id"] = item_key + simple_config["iec104Client"]["mapping_table"]["YC"][item_key] = item + db_index.add_index(typ="iec104Client", key=(item["measureName"]), value=item_key) + + for i in range(0, len(user_config.iec104Client["mapping_table"]["YK"])): + item = user_config.iec104Client["mapping_table"]["YK"][i] + if "_id" in item: + item_key = item["_id"] + else: + item_key = simple_config_generate_array_key(i) + item["_id"] = item_key + simple_config["iec104Client"]["mapping_table"]["YK"][item_key] = item + db_index.add_index(typ="iec104Client", key=(item["measureName"]), value=item_key) + + simple_config["opcuaServer"] = dict() + opcua_server = copy.deepcopy(user_config.opcuaServer) + if "mapping_table" in opcua_server: + del opcua_server["mapping_table"] + simple_config["opcuaServer"]["config"] = opcua_server + if "mapping_table" not in simple_config["opcuaServer"]: + simple_config["opcuaServer"]["mapping_table"] = collections.OrderedDict() + for i in range(0, len(user_config.opcuaServer["mapping_table"])): + mea = user_config.opcuaServer["mapping_table"][i] + if "_id" in mea: + mea_key = mea["_id"] + else: + mea_key = simple_config_generate_array_key(i) + mea["_id"] = mea_key + simple_config["opcuaServer"]["mapping_table"][mea_key] = mea + db_index.add_index(typ="opcuaServer", key=(mea["measureName"]), value=mea_key) + + simple_config["sl651Slave"] = dict() + sl651_slave = copy.deepcopy(user_config.sl651Slave) + if "mapping_table" in sl651_slave: + del sl651_slave["mapping_table"] + simple_config["sl651Slave"]["config"] = sl651_slave + if "mapping_table" not in simple_config["sl651Slave"]: + simple_config["sl651Slave"]["mapping_table"] = collections.OrderedDict() + for i in range(0, len(user_config.sl651Slave["mapping_table"])): + mea = user_config.sl651Slave["mapping_table"][i] + if "_id" in mea: + mea_key = mea["_id"] + else: + mea_key = simple_config_generate_array_key(i) + mea["_id"] = mea_key + 
simple_config["sl651Slave"]["mapping_table"][mea_key] = mea + db_index.add_index(typ="sl651Slave", key=(mea["measureName"]), value=mea_key) + + simple_config["hj212Client"] = dict() + hj212_client = copy.deepcopy(user_config.hj212Client) + if "mapping_table" in hj212_client: + del hj212_client["mapping_table"] + if "block_list" in hj212_client: + del hj212_client["block_list"] + simple_config["hj212Client"]["config"] = hj212_client + if "block_list" not in simple_config["hj212Client"]["config"]: + simple_config["hj212Client"]["config"]["block_list"] = collections.OrderedDict() + for i in range(0, len(user_config.hj212Client["block_list"])): + block = user_config.hj212Client["block_list"][i] + if "_id" in block: + block_key = block["_id"] + else: + block_key = simple_config_generate_array_key(i) + block["_id"] = block_key + simple_config["hj212Client"]["config"]["block_list"][block_key] = block + db_index.add_index(typ="hj212Client", key=(block["name"]), value=block_key) + + if "mapping_table" not in simple_config["hj212Client"]: + simple_config["hj212Client"]["mapping_table"] = collections.OrderedDict() + for i in range(0, len(user_config.hj212Client["mapping_table"])): + mea = user_config.hj212Client["mapping_table"][i] + if "_id" in mea: + mea_key = mea["_id"] + else: + mea_key = simple_config_generate_array_key(i) + mea["_id"] = mea_key + simple_config["hj212Client"]["mapping_table"][mea_key] = mea + db_index.add_index(typ="hj212Client", key=(mea["measureName"]), value=mea_key) + + simple_config["bacnetServer"] = dict() + bacnet_server = copy.deepcopy(user_config.bacnetServer) + if "mapping_table" in bacnet_server: + del bacnet_server["mapping_table"] + simple_config["bacnetServer"]["config"] = bacnet_server + if "mapping_table" not in simple_config["bacnetServer"]: + simple_config["bacnetServer"]["mapping_table"] = collections.OrderedDict() + for i in range(0, len(user_config.bacnetServer["mapping_table"])): + mea = user_config.bacnetServer["mapping_table"][i] 
+ if "_id" in mea: + mea_key = mea["_id"] + else: + mea_key = simple_config_generate_array_key(i) + mea["_id"] = mea_key + simple_config["bacnetServer"]["mapping_table"][mea_key] = mea + db_index.add_index(typ="bacnetServer", key=(mea["measureName"]), value=mea_key) + + simple_config["Dnp3Server"] = dict() + Dnp3Server = copy.deepcopy(user_config.Dnp3Server) + if "mapping_table" in Dnp3Server: + del Dnp3Server["mapping_table"] + simple_config["Dnp3Server"]["config"] = Dnp3Server + if "mapping_table" not in simple_config["Dnp3Server"]: + simple_config["Dnp3Server"]["mapping_table"] = collections.OrderedDict() + for i in range(0, len(user_config.Dnp3Server["mapping_table"])): + mea = user_config.Dnp3Server["mapping_table"][i] + if "_id" in mea: + mea_key = mea["_id"] + else: + mea_key = simple_config_generate_array_key(i) + mea["_id"] = mea_key + simple_config["Dnp3Server"]["mapping_table"][mea_key] = mea + db_index.add_index(typ="Dnp3Server", key=(mea["measureName"]), value=mea_key) + + simple_config["iec61850Server"] = dict() + iec61850Server = copy.deepcopy(user_config.iec61850Server) + if "mapping_table" in iec61850Server: + del iec61850Server["mapping_table"] + simple_config["iec61850Server"]["config"] = iec61850Server + if "mapping_table" not in simple_config["iec61850Server"]: + simple_config["iec61850Server"]["mapping_table"] = collections.OrderedDict() + for i in range(0, len(user_config.iec61850Server["mapping_table"])): + mea = user_config.iec61850Server["mapping_table"][i] + mea_key = simple_config_generate_array_key(i) + simple_config["iec61850Server"]["mapping_table"][mea_key] = mea + db_index.add_index(typ="iec61850Server", key=(mea["measureName"]), value=mea_key) + + simple_config["snmpAgent"] = dict() + snmpAgent = copy.deepcopy(user_config.snmpAgent) + if "mapping_table" in snmpAgent: + del snmpAgent["mapping_table"] + simple_config["snmpAgent"]["config"] = snmpAgent + if "mapping_table" not in simple_config["snmpAgent"]: + 
simple_config["snmpAgent"]["mapping_table"] = collections.OrderedDict() + for i in range(0, len(user_config.snmpAgent["mapping_table"])): + mea = user_config.snmpAgent["mapping_table"][i] + mea_key = simple_config_generate_array_key(i) + simple_config["snmpAgent"]["mapping_table"][mea_key] = mea + db_index.add_index(typ="snmpAgent", key=(mea["measureName"]), value=mea_key) + + simple_config["quickfaas"] = dict() + simple_config["quickfaas"]["uploadFuncs"] = dict() + simple_config["quickfaas"]["downloadFuncs"] = dict() + simple_config["quickfaas"]["genericFuncs"] = user_config.quickfaas["genericFuncs"] + for cloud in user_config.clouds: + simple_config["quickfaas"]["uploadFuncs"][cloud["name"]] = list() + simple_config["quickfaas"]["downloadFuncs"][cloud["name"]] = list() + + for uploadFunc in user_config.quickfaas["uploadFuncs"]: + simple_config["quickfaas"]["uploadFuncs"][uploadFunc["cloudName"]].append(uploadFunc) + + for downloadFunc in user_config.quickfaas["downloadFuncs"]: + simple_config["quickfaas"]["downloadFuncs"][downloadFunc["cloudName"]].append(downloadFunc) + + simple_config["misc"] = user_config.misc + simple_config["templates"] = user_config.templates + return simple_config + + +def simple_config_save_user_config(user_config, simple_config): + """ + 将simple config DB保存成用户配置文件 + """ + user_config.controllers = list() + for key, con in simple_config["controllers"].items(): + user_con = copy.deepcopy(con) + user_config.controllers.append(user_con) + + user_config.measures = list() + for key, con_meas in simple_config["measures"].items(): + for key2, mea in con_meas.items(): + user_config.measures.append(mea) + + simple_config_revert_map_arry(simple_config, user_config.groups, "groups") + simple_config_revert_map_arry(simple_config, user_config.alarms, "alarms") + simple_config_revert_map_arry(simple_config, user_config.labels, "labels") + simple_config_revert_map_arry(simple_config, user_config.clouds, "clouds") + user_config.alarmLables = list() + 
def _revert_mapping_list(container, field):
    # Convert the keyed map stored at container[field] back into a plain list
    # (via simple_config_revert_map_arry); returns [] when the field is absent.
    result = list()
    if field in container:
        simple_config_revert_map_arry(container, result, field)
    return result


def _revert_modbus_mapping_table(proto_entry):
    # Rebuild the list-form "mappingTable" of a Modbus slave entry; each table
    # keeps name/slaveAddr/_id and its keyed "measures" map becomes a list.
    tables = list()
    if "mappingTable" not in proto_entry:
        return tables
    for table_value in list(proto_entry["mappingTable"].values()):
        table = dict()
        table["name"] = table_value["name"]
        table["slaveAddr"] = table_value["slaveAddr"]
        table["_id"] = table_value["_id"]
        table["measures"] = list()
        if "measures" in table_value:
            simple_config_revert_map_arry(table_value, table["measures"], "measures")
        tables.append(table)
    return tables


def simple_config_save_user_config(user_config, simple_config):
    """Persist the simple-config DB back into the user configuration object.

    Mirror of simple_config_load_user_config: every keyed map in
    ``simple_config`` is flattened back to the list form used by the user
    config file.  ``user_config`` is mutated in place; nothing is returned.
    (Original docstring, translated: "save the simple config DB as the user
    configuration file".)
    """
    user_config.controllers = list()
    for _, con in simple_config["controllers"].items():
        user_config.controllers.append(copy.deepcopy(con))

    user_config.measures = list()
    for _, con_meas in simple_config["measures"].items():
        for _, mea in con_meas.items():
            user_config.measures.append(mea)

    simple_config_revert_map_arry(simple_config, user_config.groups, "groups")
    simple_config_revert_map_arry(simple_config, user_config.alarms, "alarms")
    simple_config_revert_map_arry(simple_config, user_config.labels, "labels")
    simple_config_revert_map_arry(simple_config, user_config.clouds, "clouds")
    # (fix) the original assigned a fresh list() here and immediately
    # overwrote it with the next statement; the dead store is removed.
    user_config.alarmLables = simple_config["alarmLables"]

    # Modbus TCP / RTU slaves share the same mappingTable layout.
    user_config.modbusSlave = copy.deepcopy(simple_config["modbusSlave"]["config"])
    user_config.modbusSlave["mappingTable"] = _revert_modbus_mapping_table(simple_config["modbusSlave"])
    user_config.modbusRTUSlave = copy.deepcopy(simple_config["modbusRTUSlave"]["config"])
    user_config.modbusRTUSlave["mappingTable"] = _revert_modbus_mapping_table(simple_config["modbusRTUSlave"])

    user_config.iec104Server = copy.deepcopy(simple_config["iec104Server"]["config"])
    user_config.iec104Server["mapping_table"] = _revert_mapping_list(simple_config["iec104Server"], "mapping_table")

    # IEC101 server / IEC104 client each keep three direction tables (YX/YC/YK).
    user_config.iec101Server = copy.deepcopy(simple_config["iec101Server"]["config"])
    user_config.iec101Server.update({"mappingTable": {'YX': [], 'YC': [], 'YK': []}})
    for direction in ("YX", "YC", "YK"):
        user_config.iec101Server["mappingTable"][direction] = _revert_mapping_list(
            simple_config["iec101Server"]["mappingTable"], direction)

    user_config.iec104Client = copy.deepcopy(simple_config["iec104Client"]["config"])
    user_config.iec104Client.update({"mapping_table": {'YX': [], 'YC': [], 'YK': []}})
    for direction in ("YX", "YC", "YK"):
        user_config.iec104Client["mapping_table"][direction] = _revert_mapping_list(
            simple_config["iec104Client"]["mapping_table"], direction)

    user_config.opcuaServer = copy.deepcopy(simple_config["opcuaServer"]["config"])
    user_config.opcuaServer["mapping_table"] = _revert_mapping_list(simple_config["opcuaServer"], "mapping_table")

    user_config.sl651Slave = copy.deepcopy(simple_config["sl651Slave"]["config"])
    user_config.sl651Slave["mapping_table"] = _revert_mapping_list(simple_config["sl651Slave"], "mapping_table")

    # hj212Client stores block_list under its "config" sub-dict but
    # mapping_table at the protocol level — keep that asymmetry.
    user_config.hj212Client = copy.deepcopy(simple_config["hj212Client"]["config"])
    user_config.hj212Client["block_list"] = _revert_mapping_list(simple_config["hj212Client"]["config"], "block_list")
    user_config.hj212Client["mapping_table"] = _revert_mapping_list(simple_config["hj212Client"], "mapping_table")

    user_config.bacnetServer = copy.deepcopy(simple_config["bacnetServer"]["config"])
    user_config.bacnetServer["mapping_table"] = _revert_mapping_list(simple_config["bacnetServer"], "mapping_table")

    user_config.Dnp3Server = copy.deepcopy(simple_config["Dnp3Server"]["config"])
    user_config.Dnp3Server["mapping_table"] = _revert_mapping_list(simple_config["Dnp3Server"], "mapping_table")

    user_config.iec61850Server = copy.deepcopy(simple_config["iec61850Server"]["config"])
    user_config.iec61850Server["mapping_table"] = _revert_mapping_list(simple_config["iec61850Server"], "mapping_table")

    user_config.snmpAgent = copy.deepcopy(simple_config["snmpAgent"]["config"])
    user_config.snmpAgent["mapping_table"] = _revert_mapping_list(simple_config["snmpAgent"], "mapping_table")

    # quickfaas upload/download funcs are stored keyed per cloud; flatten them
    # back into the plain lists the user config expects.
    user_config.quickfaas["genericFuncs"] = copy.deepcopy(simple_config["quickfaas"]["genericFuncs"])
    user_config.quickfaas["uploadFuncs"] = list()
    for _, upload_funcs in simple_config["quickfaas"]["uploadFuncs"].items():
        user_config.quickfaas["uploadFuncs"].extend(upload_funcs)
    user_config.quickfaas["downloadFuncs"] = list()
    for _, download_funcs in simple_config["quickfaas"]["downloadFuncs"].items():
        user_config.quickfaas["downloadFuncs"].extend(download_funcs)

    user_config.templates = simple_config["templates"]


def simple_config_get_value(simple_config, path):
    # Thin wrapper: evaluate a JSONPath expression against the simple config;
    # jsonpath returns the list of matches, or False when nothing matches.
    return jsonpath(simple_config, path)
def simple_config_build_object(obj, field, value):
    """Set *value* at the nested key path *field* inside dict *obj*.

    Intermediate levels are created as dict() on demand, recursively.
    ``obj`` is a dict, ``field`` a list of keys; an empty path is a no-op.
    (Original docstring translated from Chinese.)
    """
    if len(field) == 0:
        return
    if len(field) == 1:
        obj[field[0]] = value
        return
    if field[0] not in obj:
        obj[field[0]] = dict()
    # (fix) the decompiler emitted this slice as "field[1[:None]]", which
    # subscripts the int 1 and raises TypeError at runtime; the intended
    # tail slice is field[1:].
    simple_config_build_object(obj[field[0]], field[1:], value)


def simple_config_scan(simple_config, this_path="$", paths=None):
    # NOTE(review): the original body was lost to a decompiler failure
    # ("Parse error at or near COME_FROM"). Reconstructed from the call site
    # in __main__, which does
    #   simple_config_scan(scdb.simple_config, this_path="$", paths=paths)
    # and then writes each collected JSONPath-style string to validate.csv.
    # Verify the exact path format against a working build before relying on it.
    if paths is None:
        paths = list()
    paths.append(this_path)
    if isinstance(simple_config, dict):
        for key, child in simple_config.items():
            simple_config_scan(child, "%s.%s" % (this_path, key), paths)
    return paths


class ConfigSchema(object):

    def __init__(self, user_conf):
        # Current and previous in-memory simple-config trees, plus the
        # measureName -> _id index populated while loading.
        self.simple_config = dict()
        self.simple_config["device_supervisor"] = dict()
        self.simple_config_old = dict()
        self.simple_config_old["device_supervisor"] = dict()
        self.simple_config_indexs = SimpleConfigIndex()
        self.user_conf = user_conf
        # JSON schema used by put() to validate incoming patches.
        self.schema_file = user_conf.app_path + "/src/master/config_schema.json"
        self.schema = None
        with open(self.schema_file, "r") as f:
            self.schema = json.load(f)
        self.load_persistant_config()

    def get_fields(self, fields):
        """
        fields = [device_supervisor.controllers, device_supervisor.groups]
        results:
        {
            config: {
                device_supervisor: {
                    controllers: {
                        "00006076cc3f552a": {
                            "protocol": "Modbus-TCP",
                            "name": "Modbus",
                            "args": { ... },
                            "samplePeriod": 10,
                            "expired": 10000,
                            "endpoint": "10.5.23.58:502",
                            "_id": "00006076cc3f552a",
                        }
                    },
                    groups: {
                        "00006076cc3f90a1": {
                            "name": "default",
                            "uploadInterval": 10,
                            "_id": "00006076cc3f90a1"
                        }
                    }
                }
            }
        }
        """
        result = dict()
        result["config"] = dict()
        result["config"]["device_supervisor"] = dict()
        for field in fields:
            # jsonpath-style lookup; returns False when the path matches nothing.
            matchs = simple_config_get_value(self.simple_config, "$." + field)
            if matchs is False:
                return "failed"
            if len(matchs) > 0:
                key_path = field.split(".")
                simple_config_build_object(result["config"], key_path, matchs[0])
        return result
+ field) + if matchs is False: + return "failed" + if len(matchs) > 0: + key_path = field.split(".") + simple_config_build_object(result["config"], key_path, matchs[0]) + + return result + + def put(self, patch): + try: + validate(instance=patch, schema=(self.schema)) + except json.decoder.JSONDecodeError as e: + try: + logger.error(e) + return False + finally: + e = None + del e + + except Exception as e: + try: + logger.error(e) + return False + finally: + e = None + del e + + return True + + def merge_patch(self, patch): + self.simple_config = merge_patch_rfc7396(self.simple_config, patch) + + def load_persistant_config(self): + """ + 从用户配置文件加载simple config。 + """ + self.simple_config["device_supervisor"] = simple_config_load_user_config(self.user_conf.ucfg, self.simple_config_indexs) + + def save_persistant_config(self): + """ + 将simple config保持到Flash文件系统的配置文件中。 + """ + simple_config_save_user_config(self.user_conf.ucfg, self.simple_config["device_supervisor"]) + self.user_conf.dump_config_file() + + +if __name__ == "__main__": + print(simple_config_generate_array_key(3)) + simple_config_index = SimpleConfigIndex() + simple_config = simple_config_load_user_config("./device_supervisor.cfg", simple_config_index) + with open("./simple_config.json", "w", encoding="utf-8") as f: + json.dump(simple_config, f) + f.write("\n") + json.dump(simple_config_index.index, f) + simple_config_save_user_config("./device_supervisor2.json", simple_config) + import json_tools + with open("./device_supervisor.cfg", "r") as f: + config1 = json.load(f) + with open("./device_supervisor2.json", "r") as f: + config2 = json.load(f) + result = json_tools.diff(config1, config2) + print(result) + assert result == [] + scdb = ConfigSchema() + scdb.load_persistant_config() + groups = scdb.get_fields(["device_supervisor.groups"]) + print(json.dumps(groups)) + print(json.dumps(scdb.get_fields(["device_supervisor.misc"]))) + print(json.dumps(scdb.get_fields(["device_supervisor.clouds"]))) + for 
k, v in groups["config"]["device_supervisor"]["groups"].items(): + print(json.dumps(scdb.get_fields(["device_supervisor.groups.%s" % k]))) + + scdb.put({"device_supervisor": {"merge_test": "ok"}}) + scdb.put({"device_supervisor": {"merge_test": None}}) + config = scdb.get_fields(["device_supervisor.controllers"]) + for k, v in config["config"]["device_supervisor"]["controllers"].items(): + scdb.put({"device_supervisor": {"controllers": {("%s" % k): {"name": "modbus-put"}}}}) + break + + print(json.dumps(scdb.get_fields([ + "device_supervisor.groups", "device_supervisor.merge_test"]))) + paths = list() + simple_config_scan((scdb.simple_config), this_path="$", paths=paths) + with open("validate.csv", "w") as f: + for p in paths: + f.write("%s,\n" % p) + + print("OK") \ No newline at end of file diff --git a/APPS_UNCOMPILED/src/master/DriverTls.py b/APPS_UNCOMPILED/src/master/DriverTls.py new file mode 100644 index 0000000..f98d377 --- /dev/null +++ b/APPS_UNCOMPILED/src/master/DriverTls.py @@ -0,0 +1,124 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/master/DriverTls.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 6206 bytes +from master.Services import Snap7DriverService, OpcUaDriverService, OpcDaDriverService, ModbusDriverService, DriverAgentService, FlowmeterDriverService, Dlt64597DriverService, Iec101MasterService, EasyProtocolDriverService, Euromap63DriverService, Dnp3DriverService, BacnetBipDriverService, BacnetMstpDriverService, Iec61850DriverService, kebaSocketDriverService, CJT1882004DriverService, ModbusRtuSlaveService, HJ212DriverService, Dlt64507DriverService, HJ2122005DriverService, OpcUaPubSubDriverService +from common.Protocol import DLT645_1997, MbRtu, MbTcp, MbAscii, MbRtuOverTcp, OpcUa, OpcDa, TANCYV13, Snap7, VRCON, IEC101, IEC103, 
# (import lines reconstructed in full; their leading halves sit in the
# previous diff hunk)
from master.Services import Snap7DriverService, OpcUaDriverService, OpcDaDriverService, ModbusDriverService, DriverAgentService, FlowmeterDriverService, Dlt64597DriverService, Iec101MasterService, EasyProtocolDriverService, Euromap63DriverService, Dnp3DriverService, BacnetBipDriverService, BacnetMstpDriverService, Iec61850DriverService, kebaSocketDriverService, CJT1882004DriverService, ModbusRtuSlaveService, HJ212DriverService, Dlt64507DriverService, HJ2122005DriverService, OpcUaPubSubDriverService
from common.Protocol import DLT645_1997, MbRtu, MbTcp, MbAscii, MbRtuOverTcp, OpcUa, OpcDa, TANCYV13, Snap7, VRCON, IEC101, IEC103, IEC104, Easycom, EasyEthernet, BacBip, BacMstp, Euromap63, DNP3Tcp, DNP3Udp, DNP3RTU, Iec61850_MMS, KeBaSocket, CJT188_2004, MbRtuSlave, Hj212_Serial, DLT645_2007, Hj2122005_Serial, OpcUa_PubSub


class DriverTls:
    """Maps a controller's protocol to its driver key and driver service."""

    @staticmethod
    def build_driverkey_by_controller(controller):
        """Derive the unique key that identifies the driver process for
        *controller*.

        Point-to-point protocols key on protocol+endpoint; DNP3 additionally
        keys on master/slave addresses; VRCON shares "Virtual", BacBip shares
        one key per protocol, and everything else shares "DriverAgent".
        """
        protocol = controller["protocol"]
        if protocol in [Snap7, OpcUa, OpcDa, MbRtu, MbTcp, MbAscii, MbRtuOverTcp,
                        DLT645_1997, IEC101, IEC103, IEC104, Easycom, EasyEthernet,
                        BacMstp, Euromap63, Iec61850_MMS, KeBaSocket, CJT188_2004,
                        MbRtuSlave, Hj212_Serial, DLT645_2007, Hj2122005_Serial,
                        OpcUa_PubSub]:
            driverkey = protocol + controller["endpoint"]
        elif protocol in [DNP3Udp, DNP3Tcp, DNP3RTU]:
            driverkey = protocol + controller["endpoint"] + (
                str(controller["args"]["masterAddr"]) + str(controller["args"]["slaveAddr"]))
        elif protocol in [VRCON]:
            driverkey = "Virtual"
        elif protocol in [BacBip]:
            driverkey = protocol
        else:
            driverkey = "DriverAgent"
        return driverkey

    @staticmethod
    def build_each_driver_service(name, controller, all_config, driverkey):
        """Instantiate the driver-service object matching the controller's
        protocol, bound to its on-disk driver program.

        Returns None for VRCON (virtual controllers need no driver process);
        unknown protocols fall back to the generic DriverAgent.

        (fix) The decompiler rendered this dispatch as a 20-level nested
        "else: if:" staircase; flattened to a flat elif chain — behavior is
        unchanged.
        """
        protocol = controller["protocol"]
        if protocol == Snap7:
            program = "/var/user/app/device_supervisor/src/drvr.pyc"
            service = Snap7DriverService(name, all_config, driverkey, program)
        elif protocol == OpcUa:
            program = "/var/user/app/device_supervisor/OpcuaDriver"
            service = OpcUaDriverService(name, all_config, driverkey, program)
        elif protocol == OpcUa_PubSub:
            program = "/var/user/app/device_supervisor/OpcuaPubSubDriver"
            service = OpcUaPubSubDriverService(name, all_config, driverkey, program)
        elif protocol == OpcDa:
            program = "/var/user/app/device_supervisor/src/drvr.pyc"
            service = OpcDaDriverService(name, all_config, driverkey, program)
        elif protocol in [MbRtu, MbTcp, MbAscii, MbRtuOverTcp]:
            program = "/var/user/app/device_supervisor/ModbusDriver"
            service = ModbusDriverService(name, all_config, driverkey, program)
        elif protocol in [MbRtuSlave]:
            program = "/var/user/app/device_supervisor/ModbusRtuSlaveDriver"
            service = ModbusRtuSlaveService(name, all_config, driverkey, program)
        elif protocol == BacBip:
            program = "/var/user/app/device_supervisor/BacnetbipDriver"
            service = BacnetBipDriverService(name, all_config, driverkey, program)
        elif protocol == BacMstp:
            program = "/var/user/app/device_supervisor/BacnetmstpDriver"
            service = BacnetMstpDriverService(name, all_config, driverkey, program)
        elif protocol in [TANCYV13]:
            program = "/var/user/app/device_supervisor/FlowmeterDriver"
            service = FlowmeterDriverService(name, all_config, driverkey, program)
        elif protocol == DLT645_1997:
            program = "/var/user/app/device_supervisor/Dlt64597Driver"
            service = Dlt64597DriverService(name, all_config, driverkey, program)
        elif protocol == DLT645_2007:
            program = "/var/user/app/device_supervisor/Dlt64507Driver"
            service = Dlt64507DriverService(name, all_config, driverkey, program)
        elif protocol in [Easycom, EasyEthernet]:
            program = "/var/user/app/device_supervisor/EasyProtocolDriver"
            service = EasyProtocolDriverService(name, all_config, driverkey, program)
        elif protocol in [IEC101, IEC103, IEC104]:
            program = "/var/user/app/device_supervisor/iecMaster"
            service = Iec101MasterService(name, all_config, driverkey, program)
        elif protocol == Euromap63:
            program = "/var/user/app/device_supervisor/Euromap63Driver"
            service = Euromap63DriverService(name, all_config, driverkey, program)
        elif protocol in [DNP3Tcp, DNP3Udp, DNP3RTU]:
            program = "/var/user/app/device_supervisor/Dnp3Driver"
            service = Dnp3DriverService(name, all_config, driverkey, program)
        elif protocol == Iec61850_MMS:
            program = "/var/user/app/device_supervisor/iec61850Driver"
            service = Iec61850DriverService(name, all_config, driverkey, program)
        elif protocol == CJT188_2004:
            program = "/var/user/app/device_supervisor/cjt188Driver"
            service = CJT1882004DriverService(name, all_config, driverkey, program)
        elif protocol == KeBaSocket:
            program = "/var/user/app/device_supervisor/kebaSocketDriver"
            service = kebaSocketDriverService(name, all_config, driverkey, program)
        elif protocol in [Hj212_Serial]:
            program = "/var/user/app/device_supervisor/HJ212Driver"
            service = HJ212DriverService(name, all_config, driverkey, program)
        elif protocol in [Hj2122005_Serial]:
            program = "/var/user/app/device_supervisor/HJ21205Driver"
            service = HJ2122005DriverService(name, all_config, driverkey, program)
        elif protocol == VRCON:
            # Virtual controllers run no external driver process.
            return
        else:
            program = "/var/user/app/device_supervisor/DriverAgent"
            service = DriverAgentService(name, all_config, driverkey, program)
        return service


# --- (new file in the patch: APPS_UNCOMPILED/src/master/Erlang.py) ---
import time, json, copy, base64, datetime, os, libevent
from common.Logger import logger
import common.ErlangAPI as ErlangAPI
import common.Utilities as Utilities
import common.InternalTopic as InternalTopic
from common.CloudType import Erlang as CloudTypeErlang
from common.CloudType import BaiYing as CloudTypeBaiYing
from common.ServiceID import MASTER_SERVICE_ID, ERLANG_MQTT_SERVICE_ID
from common.InternalPath import EC_SYSTEM_INFO


class Erlang(object):
    """Bridge between the master service and the Erlang/BaiYing cloud agent."""

    def __init__(self, master):
        super().__init__()
        self.thing_name = ""
        self.bind_cofig_info = {'devId': "",
                                'devName': ""}
        self.cloud_south_version = ""
        self.local_bind_config = dict()
        self.master = master
        self.ucfg = master.cfg.ucfg
        self.schema = master.cfg_schema
        self.base = master.base
        self.mqclient = master.mqclient
        self.on_cfg_editing = master.on_cfg_editing
        self.configApi = master.configApi
        # NOTE(review): decompiled code calls the module object imported as
        # ErlangAPI directly; confirm whether the original was
        # ErlangAPI.ErlangAPI(...) in a working build.
        self.erlang_api = ErlangAPI(self.mqclient, MASTER_SERVICE_ID)
        self.saas_type = self.get_saas_type()
        self.sub_remote_remote_message()
        # Periodic re-announce interval (seconds) for the connection state.
        self.request_baying_time = 10
        self.request_baying_timer = libevent.Timer((self.base), (self.request_baying_timer_callback), userdata=None)
    def request_baying_timer_callback(self, evt, userdata):
        # Periodic timer callback: re-announce the connection state of every
        # Erlang/BaiYing cloud entry to the agent (flag=1 when enabled).
        for cloud in self.ucfg.clouds:
            if cloud["type"] in [CloudTypeErlang, CloudTypeBaiYing]:
                if cloud["enable"] == 1:
                    self.send_connect_info_to_erlang_agent(cloud, None, 1)
                else:
                    self.send_connect_info_to_erlang_agent(cloud, None, 0)

    def get_cloud(self):
        # Return the first configured BaiYing/Erlang cloud entry, or None
        # (implicitly) when no such cloud is configured.
        for _, cloud in self.schema.simple_config["device_supervisor"]["clouds"].items():
            if cloud["type"] in [CloudTypeBaiYing, CloudTypeErlang]:
                return cloud

    def get_saas_type(self):
        # SaaS cloud type string ("" when no Erlang/BaiYing cloud exists).
        cloud = self.get_cloud()
        if cloud is None:
            return ""
        return cloud["type"]

    def get_cloud_is_enable(self):
        # Return the cloud entry when present AND enabled, else None.
        cloud = self.get_cloud()
        if cloud is None or cloud["enable"] == 0:
            return
        return cloud

    def get_local_force_is_enable(self):
        # False only when the cloud explicitly sets args["force"] == 0;
        # defaults to True in every other case (including no cloud at all).
        cloud = self.get_cloud()
        if cloud is not None:
            if "force" in cloud["args"]:
                if int(cloud["args"]["force"]) == 0:
                    return False
        return True

    def check_need_upload(self):
        # True when the cloud is enabled and the local south-config version
        # differs from the version last seen on the cloud side.
        if self.get_cloud_is_enable():
            if self.ucfg.southMetadata["version"] != self.cloud_south_version:
                return True
        return False

    def sub_remote_remote_message(self):
        # Subscribe to the agent's downstream-publish topic and to the
        # system-wide Erlang connection-notice topic.
        self.mqclient.add_sub(InternalTopic.EVENT_BUS_REMOTE_DOWNSTREAM_PUBLISH.format(agentServiceId=ERLANG_MQTT_SERVICE_ID,
          srcServiceId=MASTER_SERVICE_ID), self.remote_agent_message_handler)
        self.mqclient.add_sub(InternalTopic.EVENT_BUS_SYSTEM_ERLANG_CONN_NOICE, self.agent_conn_message_handler)
        logger.debug("Erlang sub agent topics")
    def require_agent_subscribe_topics(self):
        # Subscribe (through the agent) to the cloud-side topics for the
        # current SaaS type; "erlang" vs "we" selects the topic namespace.
        if self.saas_type == CloudTypeErlang:
            logger.info("SUB Erlang topics")
            metadata_topic = InternalTopic.EVENT_BUS_ERLANG_METADATA.format(thingName=(self.thing_name), cloud="erlang")
            upload_topic = InternalTopic.EVENT_BUS_ERLANG_UPLOAD.format(thingName=(self.thing_name), cloud="erlang")
            download_topic = InternalTopic.EVENT_BUS_ERLANG_DOWNLOAD.format(thingName=(self.thing_name), cloud="erlang")
            devinfo_topic = InternalTopic.EVENT_BUS_ERLANG_DEVICE_INFO.format(thingName=(self.thing_name), cloud="erlang")
        else:
            if self.saas_type == CloudTypeBaiYing:
                logger.info("SUB BaiYing topics")
                metadata_topic = InternalTopic.EVENT_BUS_ERLANG_METADATA.format(thingName=(self.thing_name), cloud="we")
                upload_topic = InternalTopic.EVENT_BUS_ERLANG_UPLOAD.format(thingName=(self.thing_name), cloud="we")
                download_topic = InternalTopic.EVENT_BUS_ERLANG_DOWNLOAD.format(thingName=(self.thing_name), cloud="we")
                devinfo_topic = InternalTopic.EVENT_BUS_ERLANG_DEVICE_INFO.format(thingName=(self.thing_name), cloud="we")
            else:
                logger.error("SUB Erlang agent topics error")
                return
        upload_south_config = InternalTopic.EVENT_BUS_ERLANG_REQUEST_SOUTH_CONFIG.format(thingName=(self.thing_name))
        sub_topics = [metadata_topic, upload_topic, download_topic, devinfo_topic, upload_south_config]
        for topic in sub_topics:
            self.erlang_api.subscribe(topic)

    def agent_conn_message_handler(self, topic, payload):
        # Callback for EVENT_BUS_SYSTEM_ERLANG_CONN_NOICE: when the agent
        # reports status == 2 (connected), start the cloud handshake.
        if not self.get_cloud_is_enable():
            logger.warn("Erlang is not enable,conn return")
            return
        try:
            payload = json.loads(payload)
            logger.info("Erlang connection agent topic: %s , payload: %s" % (topic, payload))
            if 2 == payload["status"]:
                self.connect_erlang()
        except Exception as e:
            # "e = None; del e" is decompiler scaffolding for Python 3's
            # except-scope cleanup; preserved byte-for-byte.
            try:
                logger.warn("Erlang southConfig response error %s" % e)
            finally:
                e = None
                del e

    def connect_erlang(self):
        # Handshake with the agent: read thing name, subscribe to topics and
        # request metadata/device-info from the cloud side.
        try:
            if not self.erlang_api.read_erlang_connect_status_file():
                logger.warn("Erlang agent is not connected!")
                return
            else:
                self.thing_name = self.erlang_api.read_erlang_thing_name(self.saas_type)
                if not self.thing_name:
                    logger.warn("Erlang thing name is none")
                    return
                self.require_agent_subscribe_topics()
                # Small delay so the subscriptions land before the requests.
                time.sleep(0.1)
                if self.saas_type == CloudTypeErlang:
                    topic = InternalTopic.EVENT_BUS_ERLANG_REQUEST_METADATA.format(thingName=(self.thing_name), cloud="erlang")
                    self.erlang_api.publish(topic)
                    topic = InternalTopic.EVENT_BUS_ERLANG_REQUEST_DEVICE_INFO.format(thingName=(self.thing_name), cloud="erlang")
                    self.erlang_api.publish(topic)
                elif self.saas_type == CloudTypeBaiYing:
                    topic = InternalTopic.EVENT_BUS_ERLANG_REQUEST_METADATA.format(thingName=(self.thing_name), cloud="we")
                    self.erlang_api.publish(topic)
                else:
                    logger.warn("Not support erlang cloud type")
        except Exception as e:
            try:
                logger.warn("Erlang connect response error %s" % e)
            finally:
                e = None
                del e
    def reconnect_erlang(self, cloud):
        # Re-announce the (enabled) cloud to the agent, then run the
        # connect handshake; raises internally on an invalid host.
        try:
            logger.debug("Erlang reconnect agent")
            res = self.send_connect_info_to_erlang_agent(cloud, None, 1)
            if res == "host error":
                raise ValueError("Invalid host %s" % cloud["args"]["host"])
            self.saas_type = cloud["type"]
            logger.info("Erlang reconnect cloud type: %s" % self.saas_type)
            self.connect_erlang()
        except Exception as e:
            try:
                logger.warn("Erlang reconnect response error %s" % e)
            finally:
                e = None
                del e

    def disconnect_erlang(self, cloud):
        # Tell the agent to close the channel (close=True, flag=1).
        try:
            logger.debug("Erlang disconnect agent")
            self.send_connect_info_to_erlang_agent(cloud, True, 1)
        except Exception as e:
            try:
                logger.warn("Erlang disconnect response error %s" % e)
            finally:
                e = None
                del e

    def send_connect_info_to_erlang_agent(self, cloud, close=None, flag=0):
        # Build and push the connection request toward the front end.
        #   close: True => mode 0 (disconnect), False => mode 1 (connect),
        #          None => mode follows cloud["enable"].
        #   flag:  0 => bare config_source-only payload.
        # Returns "host error" when the front-end put raises.
        data = dict()
        if flag:
            if close is True:
                data["config_source"] = 0
                data["mode"] = 0
            else:
                if close is False:
                    data["config_source"] = 0
                    data["mode"] = 1
                else:
                    data["config_source"] = 0
                    data["mode"] = cloud["enable"]
            # NOTE(review): indentation of the following four statements is
            # ambiguous in the decompiled text; placed at the "if flag" level
            # since every branch above needs them — confirm against a build.
            data["cache_size"] = cloud["cacheSize"]
            firmware = self.configApi.get_erlang_status_from_frontEnd()
            if firmware and firmware["server_addr"] == "iscada.com.cn" and cloud["args"]["host"] == "iscada.inhandcloud.cn":
                data["server_addr"] = "iscada.com.cn"
            else:
                data["server_addr"] = cloud["args"]["host"]
            data["channel_keepalive"] = cloud["args"]["keepalive"]
        else:
            data["config_source"] = 0
        requestData = {}
        if os.path.exists(EC_SYSTEM_INFO):
            # New-style system: translate addr into server/region fields.
            if "mode" not in data or "server_addr" not in data:
                logger.error("mode or server_addr not in data")
                return
            else:
                if data["mode"] == 1:
                    requestData["enable"] = True
                else:
                    requestData["enable"] = False
                if data["server_addr"].startswith("xenergy"):
                    requestData["server"] = "we"
                else:
                    if data["server_addr"].startswith("iscada"):
                        requestData["server"] = "erlang"
                    else:
                        logger.error("unknown server_addr:%s" % data["server_addr"])
                if data["server_addr"].endswith(".cn"):
                    requestData["region"] = "cn"
                else:
                    if data["server_addr"].endswith(".com"):
                        requestData["region"] = "us"
                    else:
                        if data["server_addr"].endswith(".dev"):
                            requestData["region"] = "dev"
                        else:
                            if data["server_addr"].endswith(".design"):
                                requestData["region"] = "beta"
                            else:
                                logger.error("unknown server_addr:%s" % data["server_addr"])
        else:
            requestData = data
        logger.debug("send_connect_info_to_erlang_agent data: %s" % requestData)
        try:
            ret = self.configApi.send_put_to_frontEnd(requestData)
            if os.path.exists(EC_SYSTEM_INFO):
                if ret == "failed":
                    logger.error("send_put_to_frontEnd failed")
            # Re-arm the periodic announce timer.
            self.request_baying_timer.add(self.request_baying_time)
        except Exception as e:
            try:
                logger.error("erlang response error %s" % e)
                return "host error"
            finally:
                e = None
                del e
    def clear_bind_info(self):
        # Reset all cloud-binding state: metadata version/timestamp, the
        # bind fields, and the cached variable/alert tables.
        logger.debug("Erlang clear bind info")
        self.ucfg.bindMetadata["version"] = ""
        self.ucfg.bindMetadata["timestamp"] = ""
        self.ucfg.bindConfig["enable"] = 0
        self.ucfg.bindConfig["saas"] = 0
        for key in self.ucfg.bindConfig["bind"]:
            self.ucfg.bindConfig["bind"][key] = ""

        self.ucfg.bindConfig["varGroups"].clear()
        self.ucfg.bindConfig["variables"].clear()
        self.ucfg.bindConfig["alerts"].clear()
self.ucfg.bindConfig["bind"]: + self.ucfg.bindConfig["bind"][key] = "" + + self.ucfg.bindConfig["varGroups"].clear() + self.ucfg.bindConfig["variables"].clear() + self.ucfg.bindConfig["alerts"].clear() + + def generate_south_version(self): + for _, cloud in self.schema.simple_config["device_supervisor"]["clouds"].items(): + if cloud["type"] == CloudTypeErlang: + erlang_southconfig = self.erlang_get_south_config() + self.ucfg.southMetadata["version"] = erlang_southconfig["southMetadata"]["version"] + self.ucfg.southMetadata["timestamp"] = erlang_southconfig["southMetadata"]["timestamp"] + break + + def baiying_get_south_config(self): + config_data = dict() + config_data["controllers"] = list() + schema_ctls = self.schema.simple_config["device_supervisor"]["controllers"] + for con in list(schema_ctls.values()): + config_data["controllers"].append(con) + + config_data["measures"] = list() + schema_meas = self.schema.simple_config["device_supervisor"]["measures"] + for _, measures in schema_meas.items(): + for mea in list(measures.values()): + config_data["measures"].append(mea) + + config_data["groups"] = list() + schema_grps = self.schema.simple_config["device_supervisor"]["groups"] + for grp in list(schema_grps.values()): + config_data["groups"].append(grp) + + all_data = dict() + all_data["southConfig"] = config_data + all_data["southMetadata"] = dict() + all_data["southMetadata"]["version"] = self.erlang_api._generate_md5(data=config_data) + all_data["southMetadata"]["timestamp"] = int(time.time()) + return all_data + + def baiying_apply_bindconfig(self, config): + logger.debug("Baiying apply bindconfig: %s" % json.dumps(config["bindConfig"])) + self.ucfg.bindMetadata.clear() + self.ucfg.bindMetadata.update(config["bindMetadata"]) + self.ucfg.bindConfig.clear() + self.ucfg.bindConfig.update(config["bindConfig"]) + self.ucfg.bindConfig["bind"].update(self.bind_cofig_info) + self.ucfg.bindConfig["enable"] = 1 + self.ucfg.bindConfig["saas"] = 1 + + def 
baiying_apply_southconfig(self, config): + logger.debug("Baiying apply southconfig: %s" % json.dumps(config["southConfig"])) + if self.master.update_config_finsh: + if self.master.update_config_thread.is_alive(): + logger.info("saving configuration, stop it") + self.master.stop_python_thread(self.master.update_config_thread) + self.master.update_config_finsh = False + self.schema.save_persistant_config() + self.ucfg.controllers.clear() + self.ucfg.controllers = config["southConfig"]["controllers"] + self.ucfg.measures.clear() + self.ucfg.measures = config["southConfig"]["measures"] + self.ucfg.groups.clear() + self.ucfg.groups = config["southConfig"]["groups"] + + def erlang_apply_southconfig(self, config): + logger.debug("Erlang apply southconfig: %s" % json.dumps(config["southConfig"])) + self.master.cfg.load_config_file(config["southConfig"]) + + def erlang_apply_bindconfig(self, config): + logger.debug("Erlang apply bindconfig: %s" % json.dumps(config["bindConfig"])) + self.ucfg.bindMetadata.clear() + self.ucfg.bindMetadata.update(config["bindMetadata"]) + self.ucfg.bindConfig.clear() + self.ucfg.bindConfig.update(config["bindConfig"]) + self.ucfg.bindConfig["bind"].update(self.bind_cofig_info) + self.ucfg.bindConfig["enable"] = 1 + self.ucfg.bindConfig["saas"] = 0 + + def erlang_get_south_config(self): + config_data = dict() + config_data["controllers"] = list() + schema_ctls = self.schema.simple_config["device_supervisor"]["controllers"] + controllers = list(schema_ctls.values()) + for con in controllers: + con = copy.deepcopy(con) + config_data["controllers"].append(con) + + config_data["measures"] = list() + schema_meas = self.schema.simple_config["device_supervisor"]["measures"] + for _, measures in schema_meas.items(): + for mp in list(measures.values()): + new_mp = dict() + new_mp["name"] = mp["name"] + new_mp["ctrlName"] = mp["ctrlName"] + new_mp["uploadType"] = mp["uploadType"] + if mp["uploadType"] in ('periodic', 'onChange'): + new_mp["group"] = 
mp["group"] + new_mp["dataType"] = mp["dataType"] if "dataType" in mp else "" + new_mp["addr"] = mp["addr"] + if new_mp["dataType"] in ('FLOAT', 'DOUBLE'): + new_mp["decimal"] = mp["decimal"] if "decimal" in mp else 2 + if new_mp["dataType"] in ('STRING', ): + new_mp["len"] = mp["len"] if "len" in mp else 2 + new_mp["readWrite"] = mp["readWrite"] + if new_mp["dataType"] in ('BYTE', 'SINT', 'WORD', 'INT', 'DWORD', 'DINT', + 'BCD', 'BCD32', 'ULONG', 'LONG'): + new_mp["enableBit"] = mp["enableBit"] if "enableBit" in mp else 0 + if new_mp["enableBit"] == 1: + new_mp["bitIndex"] = mp["bitIndex"] if "bitIndex" in mp else 0 + new_mp["bitMap"] = int(mp["bitMap"]) if "bitMap" in mp else 0 + new_mp["reverseBit"] = int(mp["reverseBit"]) if "reverseBit" in mp else 0 + if new_mp["dataType"] == "BIT": + new_mp["reverseBit"] = int(mp["reverseBit"]) if "reverseBit" in mp else 0 + if mp["uploadType"] == "onChange": + if "enableBit" in mp: + if mp["enableBit"] == 0: + new_mp["deadZonePercent"] = mp["deadZonePercent"] if "deadZonePercent" in mp else "0" + elif mp["dataType"] in ('FLOAT', 'DOUBLE'): + new_mp["deadZonePercent"] = mp["deadZonePercent"] if "deadZonePercent" in mp else "0" + if "data_endian_reverse" in mp and "data_parse_method" in mp: + new_mp["data_endian_reverse"] = mp["data_endian_reverse"] + if new_mp["readWrite"] in ('rw', 'ro'): + new_mp["rd_offset"] = mp["rd_offset"] + new_mp["rd_length"] = mp["rd_length"] + if new_mp["readWrite"] in ('wo', 'rw'): + new_mp["wr_offset"] = mp["wr_offset"] + new_mp["wr_length"] = mp["wr_length"] + new_mp["data_parse_method"] = mp["data_parse_method"] + if new_mp["dataType"] == "BIT": + new_mp["bit_id"] = mp["bit_id"] + new_mp["unit"] = mp["unit"] + new_mp["desc"] = mp["desc"] + new_mp["transformType"] = mp["transformType"] + if mp["transformType"] == 1: + new_mp["maxValue"] = mp["maxValue"] + new_mp["minValue"] = mp["minValue"] + new_mp["maxScaleValue"] = mp["maxScaleValue"] + new_mp["minScaleValue"] = mp["minScaleValue"] + 
new_mp["transDecimal"] = mp["transDecimal"] + else: + if mp["transformType"] == 2: + new_mp["gain"] = mp["gain"] + new_mp["offset"] = mp["offset"] + new_mp["transDecimal"] = mp["transDecimal"] + else: + if mp["transformType"] == 3: + new_mp["startBit"] = mp["startBit"] + new_mp["endBit"] = mp["endBit"] + else: + if mp["transformType"] == 4: + new_mp["gain"] = mp["gain"] + new_mp["offset"] = mp["offset"] + new_mp["pt"] = mp["pt"] + new_mp["ct"] = mp["ct"] + new_mp["transDecimal"] = mp["transDecimal"] + else: + if mp["transformType"] == 5: + new_mp["mapping_table"] = mp["mapping_table"] + if new_mp["dataType"] == "BIT": + new_mp["bitMap"] = int(mp["bitMap"]) + if "msecSample" in mp: + new_mp["msecSample"] = int(mp["msecSample"]) + if "storageLwTSDB" in mp: + new_mp["storageLwTSDB"] = int(mp["storageLwTSDB"]) + if "arrayIndex" in mp: + new_mp["arrayIndex"] = int(mp["arrayIndex"]) + new_mp["pollCycle"] = int(mp["pollCycle"]) if "pollCycle" in mp else 0 + config_data["measures"].append(new_mp) + + config_data["groups"] = list() + schema_grps = self.schema.simple_config["device_supervisor"]["groups"] + for grp in list(schema_grps.values()): + config_data["groups"].append(grp) + + config_data["alarmLables"] = list() + schema_alaLabs = self.schema.simple_config["device_supervisor"]["alarmLables"] + for alaLab in schema_alaLabs: + config_data["alarmLables"].append(alaLab) + + config_data["alarms"] = list() + schema_alas = self.schema.simple_config["device_supervisor"]["alarms"] + for alm in list(schema_alas.values()): + config_data["alarms"].append(alm) + + all_data = dict() + all_data["southConfig"] = config_data + all_data["southMetadata"] = dict() + all_data["southMetadata"]["version"] = self.erlang_api._generate_md5(data=config_data) + all_data["southMetadata"]["timestamp"] = int(time.time()) + return all_data + + def upload_southconfig(self, payload=None): + self.thing_name = self.erlang_api.read_erlang_thing_name(self.saas_type) + if not self.thing_name: + 
logger.warn("Erlang thing name is none,can't upload southconfig") + return + if payload is None: + if self.saas_type == CloudTypeErlang: + topic = InternalTopic.EVENT_BUS_ERLANG_REQUEST_UPLOAD.format(thingName=(self.thing_name), cloud="erlang") + else: + if self.saas_type == CloudTypeBaiYing: + topic = InternalTopic.EVENT_BUS_ERLANG_REQUEST_UPLOAD.format(thingName=(self.thing_name), cloud="we") + else: + logger.error("Erlang Request upload southconfig error, saas_type: %s" % self.saas_type) + return + self.erlang_api.publish(topic) + else: + method = payload["httpMethod"] + url = payload["url"] + expired = payload["expiresIn"] + if self.saas_type == CloudTypeErlang: + data = self.erlang_get_south_config() + else: + if self.saas_type == CloudTypeBaiYing: + data = self.baiying_get_south_config() + else: + logger.error("Erlang Request upload southconfig error2, saas_type: %s" % self.saas_type) + return + self.cloud_south_version = data["southMetadata"]["version"] + self.erlang_api.upload_southconfig(method, url, expired, data=data) + + def download_southconfig(self, payload=None): + if payload is None: + if self.saas_type == CloudTypeErlang: + topic = InternalTopic.EVENT_BUS_ERLANG_REQUEST_DOWNLOAD.format(thingName=(self.thing_name), cloud="erlang") + else: + if self.saas_type == CloudTypeBaiYing: + topic = InternalTopic.EVENT_BUS_ERLANG_REQUEST_DOWNLOAD.format(thingName=(self.thing_name), cloud="we") + else: + logger.error("Erlang Request download southconfig error, saas_type: %s" % self.saas_type) + return + self.erlang_api.publish(topic) + else: + method = payload["httpMethod"] + url = payload["url"] + expired = payload["expiresIn"] + logger.info("Erlang Start download southconfig to erlang(method:%s, url:%s, expired:%s)" % (method, url, expired)) + if self.saas_type == CloudTypeErlang: + cfg = self.erlang_api.download_southconfig(method, url, expired, "erlang_southConfig.json") + else: + if self.saas_type == CloudTypeBaiYing: + cfg = 
self.erlang_api.download_southconfig(method, url, expired, "baiying_southConfig.json") + else: + logger.error("Erlang Request download southconfig error2, saas_type: %s" % self.saas_type) + return + self.cfg_update(cfg) + + def cfg_update(self, cfg): + if cfg is None: + return + try: + self.schema.simple_config_old = copy.deepcopy(self.schema.simple_config) + update_config = [] + local_south_metadata = self.ucfg.southMetadata + local_bind_metadata = self.ucfg.bindMetadata + if "version" not in local_south_metadata: + local_south_metadata["version"] = "" + elif "version" not in local_bind_metadata: + local_bind_metadata["version"] = "" + if "southMetadata" in cfg and cfg["southMetadata"]["version"] != local_south_metadata["version"]: + if not self.get_local_force_is_enable(): + if self.saas_type == CloudTypeErlang: + self.erlang_apply_southconfig(cfg) + else: + if self.saas_type == CloudTypeBaiYing: + self.baiying_apply_southconfig(cfg) + update_config.append("southConfig") + elif "bindMetadata" in cfg and cfg["bindMetadata"]["version"] != local_bind_metadata["version"]: + if self.saas_type == CloudTypeErlang: + self.erlang_apply_bindconfig(cfg) + else: + if self.saas_type == CloudTypeBaiYing: + self.baiying_apply_bindconfig(cfg) + update_config.append("bindConfig") + if "southConfig" in update_config: + self.schema.load_persistant_config() + self.on_cfg_editing(wait_time=1) + elif "bindConfig" in update_config: + self.on_cfg_editing(wait_time=1) + except Exception as e: + try: + logger.warn("BaiYing cfg_update error %s" % e) + finally: + e = None + del e + + def baiying_metadata_topic_handler(self, payload): + if not payload or len(payload) == 0: + logger.info("BaiYing metadata topic payload is none,upload southconfig") + self.schema.simple_config_old = copy.deepcopy(self.schema.simple_config) + self.upload_southconfig() + if self.ucfg.bindConfig["enable"] == 1: + self.clear_bind_info() + self.on_cfg_editing(wait_time=1) + return + cloud_south_metadata = dict() + 
cloud_bind_metadata = dict() + if "southMetadata" in payload: + cloud_south_metadata = payload["southMetadata"] + self.cloud_south_version = cloud_south_metadata["version"] + elif "bindMetadata" in payload: + cloud_bind_metadata = payload["bindMetadata"] + else: + local_south_metadata = self.ucfg.southMetadata + local_bind_metadata = self.ucfg.bindMetadata + if "version" not in local_south_metadata: + local_south_metadata["version"] = "" + elif "version" not in local_bind_metadata: + local_bind_metadata["version"] = "" + logger.debug("cloud_south_metadata:%s" % cloud_south_metadata) + logger.debug("local_south_metadata:%s" % local_south_metadata) + logger.debug("cloud_bind_metadata:%s" % cloud_bind_metadata) + logger.debug("local_bind_metadata:%s" % local_bind_metadata) + if cloud_bind_metadata: + if cloud_south_metadata["version"] != local_south_metadata["version"]: + self.get_local_force_is_enable() or logger.info("Erlang south version diff, and cloud first") + self.download_southconfig() + else: + if cloud_bind_metadata["version"] != local_bind_metadata["version"]: + logger.info("Erlang bind version diff") + self.download_southconfig() + else: + if cloud_south_metadata["version"] != local_south_metadata["version"] and self.get_local_force_is_enable(): + logger.info("Erlang south version diff, and local first") + self.upload_southconfig() + else: + if self.ucfg.bindConfig["enable"] == 1: + logger.info("Erlang bind is none, clear bind info") + self.schema.simple_config_old = copy.deepcopy(self.schema.simple_config) + self.clear_bind_info() + self.on_cfg_editing(wait_time=1) + if cloud_south_metadata["version"] == local_south_metadata["version"]: + return + self.upload_southconfig() + + def baiying_upload_south_config_handler(self, payload): + self.upload_southconfig() + result = dict() + result["requestId"] = payload["requestId"] + result["status"] = "succeeded" + result["timestamp"] = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ") + topic = 
InternalTopic.EVENT_BUS_ERLANG_RESPONSE_SOUTH_CONFIG.format(thingName=(self.thing_name)) + result = json.dumps(result) + self.erlang_api.publish(topic, result) + + def baiying_agent_message_handler(self, topic, payload): + metadata_topic = InternalTopic.EVENT_BUS_ERLANG_METADATA.format(thingName=(self.thing_name), cloud="we") + upload_topic = InternalTopic.EVENT_BUS_ERLANG_UPLOAD.format(thingName=(self.thing_name), cloud="we") + download_topic = InternalTopic.EVENT_BUS_ERLANG_DOWNLOAD.format(thingName=(self.thing_name), cloud="we") + devinfo_topic = InternalTopic.EVENT_BUS_ERLANG_DEVICE_INFO.format(thingName=(self.thing_name), cloud="we") + upload_south_config = InternalTopic.EVENT_BUS_ERLANG_REQUEST_SOUTH_CONFIG.format(thingName=(self.thing_name), cloud="we") + if topic == metadata_topic: + logger.info("BaiYing recv metadata topic") + self.baiying_metadata_topic_handler(payload) + else: + if topic == upload_topic: + logger.info("BaiYing recv upload topic") + self.upload_southconfig(payload) + else: + if topic == download_topic: + logger.info("BaiYing recv download topic") + self.download_southconfig(payload) + else: + if topic == devinfo_topic: + logger.info("BaiYing recv devinfo topic") + else: + if topic == upload_south_config: + logger.info("BaiYing recv upload_south_config topic") + self.baiying_upload_south_config_handler(payload) + else: + logger.info("BaiYing recv unknown topic %s" % topic) + + def erlang_metadata_topic_handler(self, payload): + if not payload or len(payload) == 0: + logger.info("Erlang metadata topic payload is none,upload southconfig") + self.upload_southconfig() + return + cloud_south_metadata = dict() + cloud_bind_metadata = dict() + if "southMetadata" in payload: + cloud_south_metadata = payload["southMetadata"] + self.cloud_south_version = cloud_south_metadata["version"] + elif "bindMetadata" in payload: + cloud_bind_metadata = payload["bindMetadata"] + else: + local_south_metadata = self.ucfg.southMetadata + local_bind_metadata = 
self.ucfg.bindMetadata + if "version" not in local_south_metadata: + local_south_metadata["version"] = "" + elif "version" not in local_bind_metadata: + local_bind_metadata["version"] = "" + logger.debug("cloud_south_metadata:%s" % cloud_south_metadata) + logger.debug("local_south_metadata:%s" % local_south_metadata) + logger.debug("cloud_bind_metadata:%s" % cloud_bind_metadata) + logger.debug("local_bind_metadata:%s" % local_bind_metadata) + if cloud_bind_metadata: + if cloud_south_metadata["version"] != local_south_metadata["version"]: + self.get_local_force_is_enable() or self.download_southconfig() + else: + if cloud_bind_metadata["version"] != local_bind_metadata["version"]: + self.download_southconfig() + else: + if cloud_south_metadata["version"] != local_south_metadata["version"] and self.get_local_force_is_enable(): + self.upload_southconfig() + else: + if cloud_south_metadata["version"] == local_south_metadata["version"]: + return + self.upload_southconfig() + + def erlang_agent_message_handler(self, topic, payload): + metadata_topic = InternalTopic.EVENT_BUS_ERLANG_METADATA.format(thingName=(self.thing_name), cloud="erlang") + upload_topic = InternalTopic.EVENT_BUS_ERLANG_UPLOAD.format(thingName=(self.thing_name), cloud="erlang") + download_topic = InternalTopic.EVENT_BUS_ERLANG_DOWNLOAD.format(thingName=(self.thing_name), cloud="erlang") + devinfo_topic = InternalTopic.EVENT_BUS_ERLANG_DEVICE_INFO.format(thingName=(self.thing_name), cloud="erlang") + upload_south_config = InternalTopic.EVENT_BUS_ERLANG_REQUEST_SOUTH_CONFIG.format(thingName=(self.thing_name), cloud="erlang") + if topic == metadata_topic: + logger.info("Erlang recv metadata topic") + self.erlang_metadata_topic_handler(payload) + else: + if topic == upload_topic: + logger.info("Erlang recv upload topic") + self.upload_southconfig(payload) + else: + if topic == download_topic: + logger.info("Erlang recv download topic") + self.download_southconfig(payload) + else: + if topic == 
devinfo_topic: + self.schema.simple_config_old = copy.deepcopy(self.schema.simple_config) + logger.info("Erlang recv devinfo topic") + if not payload: + self.ucfg.bindConfig["enable"] = 0 + self.ucfg.bindConfig["saas"] = 0 + for subitem in self.ucfg.bindConfig["bind"]: + self.ucfg.bindConfig["bind"][subitem] = "" + + self.ucfg.bindConfig["varGroups"].clear() + self.ucfg.bindConfig["variables"].clear() + self.ucfg.bindConfig["alerts"].clear() + self.ucfg.bindMetadata["version"] = "" + else: + dev_id = payload["devId"] + dev_name = payload["devName"] + bind_cfg = self.ucfg.bindConfig + bind_cfg["enable"] = 1 + bind_cfg["saas"] = 0 + self.bind_cofig_info = {'devId':dev_id, + 'devName':dev_name} + bind_cfg["bind"].update(self.bind_cofig_info) + self.on_cfg_editing(wait_time=1) + else: + if topic == upload_south_config: + logger.info("Erlang recv upload_south_config topic") + else: + logger.warn("Erlang recv unknown topic %s" % topic) + + def remote_agent_message_handler(self, topic, payload): + if not self.get_cloud_is_enable(): + logger.warn("Erlang is not enable") + return + try: + payload = payload if not isinstance(payload, (str, bytes)) else Utilities.json_to_obj(payload) + if "topic" not in payload: + logger.error("other message %s" % payload) + return + else: + topic = payload["topic"] + payload = json.loads(base64.b64decode(payload["payload"])) + logger.info("Erlang agent recv message: %s , payload: %s" % (topic, payload)) + if self.saas_type == CloudTypeErlang: + self.erlang_agent_message_handler(topic, payload) + else: + if self.saas_type == CloudTypeBaiYing: + self.baiying_agent_message_handler(topic, payload) + else: + logger.error("Unknown cloud type %s" % self.saas_type) + except Exception as e: + try: + logger.warn("Erlang remote_agent_message_handler error %s" % e) + finally: + e = None + del e diff --git a/APPS_UNCOMPILED/src/master/Master.py b/APPS_UNCOMPILED/src/master/Master.py new file mode 100644 index 0000000..965b666 --- /dev/null +++ 
b/APPS_UNCOMPILED/src/master/Master.py @@ -0,0 +1,568 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/master/Master.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 30778 bytes +import os, signal, libevent, ctypes, inspect, threading, random, string, requests, copy, json +from common.Logger import logger +import master.Web as Web +import master.Erlang as Erlang +import master.Config as Config +import master.DriverTls as DriverTls +import master.ConfigSchema as ConfigSchema +from common.MQClient import MQClientLibevent +from common.MobiusAPI import MobiusPi, ConfigAPI +from common.CloudType import Erlang as CloudTypeErlang +from common.CloudType import BaiYing as CloudTypeBaiYing +from common.CloudType import SparkPlugB as CloudTypeSparkPlugB +from common.ServiceID import IEC104_SLAVE_SERVICE_ID, OPCUA_SERVER_SERVICE_ID, IEC104_CLIENT_SERVICE_ID, IEC101_SLAVE_SERVICE_ID, SL651_SLAVE_SERVICE_ID, HJ212_CLIENT_SERVICE_ID, MODBUSTCP_SLAVE_SERVICE_ID, MODBUSRTU_SLAVE_SERVICE_ID, BACNET_SERVER_SERVICE_ID, DNP3_SERVER_SERVICE_ID, IEC61850_SERVER_SERVICE_ID, SNMP_AGENT_SERVICE_ID +from master.Services import ServiceManager, DriverService, QuickFaaSService, GenericMQTTAgentService, OpcuaServerService, DataHubService, IEC104ClientService, IEC104SlaveService, IEC101SlaveService, Sl651SlaveService, HJ212ClientService, MosquittoService, ModbusTCPSlaveService, ModbusRTUSlaveService, BacnetServerService, Dnp3ServerService, iec61850ServerService, snmpAgentService, MindSpherePutService +service_c_zlog_config = '\n[global]\nstrict init = true\n\nbuffer min = 1024\nbuffer max = 2MB\n\nrotate lock file = /var/run/python/zlog.lock\n\nfile perms = 600\nfsync period = 1K\n\n[formats]\ndebugfm = "%d(%F %T),%ms <%V> [%F:%L] %m%n"\nconsolefm = "%d(%F %T),%ms <%V> [%F:%L] 
%m%n"\nsyslogfm = "%d(%F %T),%ms %V [%E(MYNAME)]: %m%n"\n\n[rules]\nmy_.DEBUG "/var/run/python/%E(MYNAME).log", 2M * 1; debugfm\nmy_.{LOG_LEVEL} >stdout; syslogfm\n' +public_key = "-----BEGIN PUBLIC KEY-----\nMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAzPf35uZQrRdOpk7RhnCx\nqx8r3GsrHv1EAvCJwVGlBaTL+/2OUftSNNztwpJIjc8eTO4Dyq+Pb/H6MV/0veaj\naiabyngJ/Go72n9s5jUMh63BOQGYQ3wKTVu+K8Em95FvGCGrGjkTniR4pJ6mU7mZ\nPH7JnTtpVW7O+rdUrvq6ZMOgGzCK5DHMeSDGinnOs/e68KrkVFGsl+KhMFVai6AD\nFkX45lM73/aWFqTBnsWogLOsmVZU8M8NLRLPyee8tpQgNaKlpZP3LMAIAPg/HMBj\natVUXuuJN7gfM+S9NjZxQbIdg2NntTW3Gc0R33xPfnp32d/yV7sXei8HtcB2Y0Y5\nHDtyk6DFsJutvHxOmeQnl0rmlMzSIVB134LjT0pjFbwQ44hm+avBLGVwpoP0hQnv\nEqb9qr/6ApyWNS4BNyetROcyMacUF5g5bdohQW6ULTfHXQo/EzMIEaTdN2RHfWpR\n8yqNCGazRIcvivtVL+pr0QtpImDd8X/n4JOuybKT0uJRmrMwJnm1WIFkgTnSJKmT\nKSW+qqWS8sctmdf6G3yxxqYTh5IgYtEHrVB4+LNEJhvjuA2vdDV8Vh5ZOdFnapxY\nz7GgxjJhd9Wfn8aBG8OyzXOO+7UCzmeDQUhbh0ig1HE1CpQNIwWpVQ434DhrmASF\nyfNaDgb69G4WhF/9BU/hNUECAwEAAQ==\n-----END PUBLIC KEY-----" + +class Master(object): + + def __init__(self, vendor_name, app_name): + self.base = libevent.Base() + self.vendor_name = vendor_name + self.app_name = app_name + self.erlang = None + self.erlang_upload_config_delay_time = 20 + self.erlang_upload_config_timer = libevent.Timer((self.base), (self.erlang_upload_config_timer_handler), userdata=None) + self.update_config_thread = None + self.web_handler = None + self.web_update_config_timer = libevent.Timer((self.base), (self.web_update_config_timer_handler), userdata=None) + self.update_config_finsh = False + self.service_manager = None + self.driverkeys = list() + self.controller_names = list() + self.mobiuspi = MobiusPi() + self.mqclient = None + self._init_mqtt_channel() + self.configApi = ConfigAPI() + self.cfg = Config(self.mobiuspi, self) + if self.mobiuspi.model_name[None[:2]].startswith("EC") or self.mobiuspi.model_name[None[:3]].startswith("HEC"): + self.ec_update_log_timer = libevent.Timer((self.base), 
(self.cfg.ec_update_log_timer_handler), userdata=None) + self.cfg.load_config_file() + self.cfg_schema = ConfigSchema(self.cfg) + self.sig_int = libevent.Signal(self.base, signal.SIGINT, self._on_signal_handler) + self.sig_int.add() + self.sig_term = libevent.Signal(self.base, signal.SIGTERM, self._on_signal_handler) + self.sig_term.add() + + def _init_mqtt_channel(self): + client_id = "master@" + "".join(random.sample(string.ascii_letters + string.digits, 8)) + self.mqclient = MQClientLibevent(self.base, client_id) + self.mqclient._on_connected = self.on_connected + if self.mobiuspi.model_name[None[:2]].startswith("EC") or self.mobiuspi.model_name[None[:3]].startswith("HEC"): + self.mqclient.set_mq_info(client_id, target_username="admin", target_passwd="admin") + self.mqclient.init_mqclient() + self.mqclient.connect() + + def ec_verify_gateway(self): + import random, string + from Crypto.Hash import SHA256 + from Crypto.PublicKey import RSA + from Crypto.Signature import PKCS1_v1_5 + random_string = "".join((random.choice(string.ascii_letters + string.digits) for _ in range(random.randint(128, 4096)))) + try: + with open("/dev/ih-verify", "w") as fp: + fp.write(random_string) + with open("/dev/ih-verify", "rb") as f: + sign = f.read() + sha256 = SHA256.new(random_string.encode()) + verifier = PKCS1_v1_5.new(RSA.importKey(public_key)) + verify_result = verifier.verify(sha256, sign) + except Exception as error: + try: + logger.error("%s" % error) + verify_result = False + finally: + error = None + del error + + if not verify_result: + raise ValueError("Verify failed! 
Please use inhand gateway!!!") + + def deinit(self): + """ + stop subprocess + """ + self.service_manager.stop_all_service() + + def _on_signal_handler(self, evt, fd, userdata): + logger.info("Received SIGINT/SIGTERM, exit...") + self.base.loopexit(0) + + def on_connected(self, client): + logger.info("local mqtt connected ...") + if self.erlang.get_cloud_is_enable(): + self.erlang.connect_erlang() + + def _add_driver_name(self, name, driverkey): + for service in self.service_manager.services: + if not hasattr(service, "add_name"): + continue + if not isinstance(service, DriverService): + continue + if service.driverkey == driverkey: + service.add_name(name) + + def _add_driver(self, name, controller, driverkey): + service = DriverTls.build_each_driver_service(name, controller, self.cfg, driverkey) + if service is None: + return False + self.service_manager.add_service(service) + return True + + def add_driver_services(self): + for controller in self.cfg.ucfg.controllers: + driverkey = DriverTls.build_driverkey_by_controller(controller) + if driverkey not in self.driverkeys and controller["name"] not in self.controller_names: + if self._add_driver(controller["name"], controller, driverkey): + self.driverkeys.append(driverkey) + self.controller_names.append(controller["name"]) + else: + self._add_driver_name(controller["name"], driverkey) + + def stop_python_thread(self, thread): + """raises the exception, performs cleanup if needed""" + exctype = SystemExit + tid = ctypes.c_long(thread.ident) + if not inspect.isclass(exctype): + exctype = type(exctype) + else: + res = ctypes.pythonapi.PyThreadState_SetAsyncExc(tid, ctypes.py_object(exctype)) + if res == 0: + raise ValueError("invalid thread id") + else: + if res != 1: + ctypes.pythonapi.PyThreadState_SetAsyncExc(tid, None) + raise SystemError("PyThreadState_SetAsyncExc failed") + + def __find_invaild_services(self): + services = list() + for service in self.service_manager.services: + if not isinstance(service, 
DriverService): + continue + for controller_name in service.controller_names: + controller = self.cfg.find_ctl_name(controller_name) + if controller is False: + services.append(service) + break + driverkey = DriverTls.build_driverkey_by_controller(controller) + if service.driverkey != driverkey: + services.append(service) + break + + return services + + def _clear_invaild_driver_services(self): + services = self._Master__find_invaild_services() + for service in services: + self.service_manager.del_service(service) + self.driverkeys.remove(service.driverkey) + self.controller_names.remove(service.name) + + def _add_or_delete_controller_driver(self): + self._clear_invaild_driver_services() + self.add_driver_services() + + def _find_ucfg_vaild_cloud(self, name): + for cloud in self.cfg.ucfg.clouds: + if cloud["type"] == CloudTypeErlang or cloud["type"] == CloudTypeBaiYing: + continue + else: + if cloud["enable"] == 0: + continue + if cloud["name"] == name: + return cloud + + def _find_invaild_mqttAgent_services(self): + services = list() + for service in self.service_manager.services: + if not isinstance(service, GenericMQTTAgentService): + continue + if self._find_ucfg_vaild_cloud(service.name) is None: + services.append(service) + + return services + + def _clear_invaild_mqttAgent_service(self): + services = self._find_invaild_mqttAgent_services() + for service in services: + self.service_manager.del_service(service) + + def _find_mqttAgent_service_by_name(self, name): + for service in self.service_manager.services: + if not isinstance(service, GenericMQTTAgentService): + continue + if service.name == name: + return service + + def _add_vaild_mqttAgent_services(self): + for cloud in self.cfg.ucfg.clouds: + if cloud["type"] == CloudTypeErlang or cloud["type"] == CloudTypeBaiYing: + continue + else: + if cloud["enable"] == 0: + continue + if self._find_mqttAgent_service_by_name(cloud["name"]) is None: + 
self.service_manager.add_service(GenericMQTTAgentService(self.cfg, cloud["name"])) + + def _add_or_delete_mqttAgent_service(self): + self._clear_invaild_mqttAgent_service() + self._add_vaild_mqttAgent_services() + + def _find_ucfg_vaild_mindsphere(self, name): + for mindsphere in self.cfg.ucfg.mindspheres: + if mindsphere["enable"] == 0: + continue + if mindsphere["name"] == name: + return mindsphere + + def _find_invaild_mindsphere_services(self): + services = list() + for service in self.service_manager.services: + if not isinstance(service, MindSpherePutService): + continue + if self._find_ucfg_vaild_mindsphere(service.name) is None: + services.append(service) + + return services + + def _clear_invaild_mindsphere_service(self): + services = self._find_invaild_mindsphere_services() + for service in services: + self.service_manager.del_service(service) + + def _find_mindsphere_service_by_name(self, name): + for service in self.service_manager.services: + if not isinstance(service, MindSpherePutService): + continue + if service.name == name: + return service + + def _add_vaild_mindsphere_services(self): + for mindsphere in self.cfg.ucfg.mindspheres: + if mindsphere["enable"] == 0: + continue + if self._find_mindsphere_service_by_name(mindsphere["name"]) is None: + self.service_manager.add_service(MindSpherePutService(self.cfg, mindsphere["name"])) + + def _add_or_delete_mindsphere_service(self): + self._clear_invaild_mindsphere_service() + self._add_vaild_mindsphere_services() + + def add_or_delete_service(self): + self._add_or_delete_mqttAgent_service() + self._add_or_delete_mindsphere_service() + service = self.service_manager.find_service_by_id(MODBUSTCP_SLAVE_SERVICE_ID) + if service and self.cfg.ucfg.modbusSlave["enable"] == 0: + self.service_manager.del_service(service) + else: + if service is None: + if self.cfg.ucfg.modbusSlave["enable"] == 1: + self.service_manager.add_service(ModbusTCPSlaveService(self.cfg, self.cfg_schema)) + else: + service = 
self.service_manager.find_service_by_id(MODBUSRTU_SLAVE_SERVICE_ID) + if service and self.cfg.ucfg.modbusRTUSlave["enable"] == 0: + self.service_manager.del_service(service) + else: + if service is None: + if self.cfg.ucfg.modbusRTUSlave["enable"] == 1: + self.service_manager.add_service(ModbusRTUSlaveService(self.cfg, self.cfg_schema)) + service = self.service_manager.find_service_by_id(OPCUA_SERVER_SERVICE_ID) + if service and self.cfg.ucfg.opcuaServer["enable"] == 0: + self.service_manager.del_service(service) + else: + if service is None: + if self.cfg.ucfg.opcuaServer["enable"] == 1: + self.service_manager.add_service(OpcuaServerService(self.cfg, self.cfg_schema)) + service = self.service_manager.find_service_by_id(IEC104_CLIENT_SERVICE_ID) + if service and self.cfg.ucfg.iec104Client["enable"] == 0: + self.service_manager.del_service(service) + else: + if service is None: + if self.cfg.ucfg.iec104Client["enable"] == 1: + self.service_manager.add_service(IEC104ClientService(self.cfg, self.cfg_schema)) + service = self.service_manager.find_service_by_id(IEC104_SLAVE_SERVICE_ID) + if service and self.cfg.ucfg.iec104Server["enable"] == 0: + self.service_manager.del_service(service) + else: + if service is None: + if self.cfg.ucfg.iec104Server["enable"] == 1: + self.service_manager.add_service(IEC104SlaveService(self.cfg, self.cfg_schema)) + service = self.service_manager.find_service_by_id(IEC101_SLAVE_SERVICE_ID) + if service and self.cfg.ucfg.iec101Server["enable"] == 0: + self.service_manager.del_service(service) + else: + if service is None: + if self.cfg.ucfg.iec101Server["enable"] == 1: + self.service_manager.add_service(IEC101SlaveService(self.cfg, self.cfg_schema)) + service = self.service_manager.find_service_by_id(SL651_SLAVE_SERVICE_ID) + if service and self.cfg.ucfg.sl651Slave["enable"] == 0: + self.service_manager.del_service(service) + else: + if service is None: + if self.cfg.ucfg.sl651Slave["enable"] == 1: + 
self.service_manager.add_service(Sl651SlaveService(self.cfg, self.cfg_schema)) + service = self.service_manager.find_service_by_id(HJ212_CLIENT_SERVICE_ID) + if service and self.cfg.ucfg.hj212Client["enable"] == 0: + self.service_manager.del_service(service) + else: + if service is None: + if self.cfg.ucfg.hj212Client["enable"] == 1: + self.service_manager.add_service(HJ212ClientService(self.cfg, self.cfg_schema)) + service = self.service_manager.find_service_by_id(BACNET_SERVER_SERVICE_ID) + if service and self.cfg.ucfg.bacnetServer["enable"] == 0: + self.service_manager.del_service(service) + else: + if service is None: + if self.cfg.ucfg.bacnetServer["enable"] == 1: + self.service_manager.add_service(BacnetServerService(self.cfg, self.cfg_schema)) + service = self.service_manager.find_service_by_id(DNP3_SERVER_SERVICE_ID) + if service and self.cfg.ucfg.Dnp3Server["enable"] == 0: + self.service_manager.del_service(service) + else: + if service is None: + if self.cfg.ucfg.Dnp3Server["enable"] == 1: + self.service_manager.add_service(Dnp3ServerService(self.cfg, self.cfg_schema)) + service = self.service_manager.find_service_by_id(IEC61850_SERVER_SERVICE_ID) + if service and self.cfg.ucfg.iec61850Server["enable"] == 0: + self.service_manager.del_service(service) + else: + if service is None: + if self.cfg.ucfg.iec61850Server["enable"] == 1: + self.service_manager.add_service(iec61850ServerService(self.cfg, self.cfg_schema)) + service = self.service_manager.find_service_by_id(SNMP_AGENT_SERVICE_ID) + if service and self.cfg.ucfg.snmpAgent["enable"] == 0: + self.service_manager.del_service(service) + else: + if service is None: + if self.cfg.ucfg.snmpAgent["enable"] == 1: + self.service_manager.add_service(snmpAgentService(self.cfg, self.cfg_schema)) + self._add_or_delete_controller_driver() + + def erlang_upload_config_timer_handler(self, evt, userdata): + if self.erlang.get_cloud_is_enable(): + self.erlang.upload_southconfig() + + def 
def start_upload_southconfig_to_erlang(self):
    """(Re)arm the one-shot timer that uploads the southbound config to erlang."""
    self.erlang_upload_config_timer.delete()
    logger.info("Upload southconfig to erlang after %s seconds" % self.erlang_upload_config_delay_time)
    self.erlang_upload_config_timer.add(self.erlang_upload_config_delay_time)

def http_put_dsa_config_update(self, url, data):
    """PUT *data* as JSON to the local dsa_config_update endpoint and log the outcome."""
    payload = json.dumps(data)  # serialized once (original dumped twice)
    logger.debug("url:%s, data:%s" % (url, payload))
    # NOTE(review): no timeout is set, so a wedged endpoint blocks the
    # config-update thread indefinitely; verify=False kept from the original
    # (local loopback endpoint).
    response = requests.put(url, data=payload, verify=False)
    if response.status_code != 200:
        logger.error("http put dsa_config_update failed, status_code:%s" % response.status_code)
        return
    results = json.loads(response.text)
    if "result" in results and results["result"] != "ok":
        logger.error("http put dsa_config_update failed, results:%s" % results)
    else:
        logger.debug("http put dsa_config_update success, results:%s" % results)

def diff_dict(self, dict1, dict2):
    """Return an RFC 7396 style merge patch turning *dict1* into *dict2*.

    Nested dicts are diffed recursively; a key present in dict1 but absent
    from dict2 maps to None (deletion marker).

    BUG FIX: the original emitted None for any *falsy* new value (0, "",
    False), which the merge-patch consumer would interpret as "delete the
    key" instead of "set it to 0/empty". Only genuinely missing keys map to
    None now.
    """
    diff = {}
    for key in set(dict1) | set(dict2):
        v1, v2 = dict1.get(key), dict2.get(key)
        if isinstance(v1, dict) and isinstance(v2, dict):
            nested = self.diff_dict(v1, v2)
            if nested:
                diff[key] = nested
        elif v1 != v2:
            diff[key] = dict2[key] if key in dict2 else None
    return diff

def work(self):
    """Config-apply worker: persist config, reconcile and (re)start services,
    upload to erlang when needed, and push the config delta to the local DSA
    endpoint on EC/HEC models."""
    self.update_config_finsh = True  # spelling kept: read by run() elsewhere in the file
    logger.debug("Thread save config to flash...")
    self.cfg_schema.save_persistant_config()
    logger.debug("Thread restart subprocess...")
    self.add_or_delete_service()
    self.start_subprocess()
    if self.erlang.check_need_upload():
        self.start_upload_southconfig_to_erlang()
    # BUG FIX: the decompiled source read model_name[None[:2]] /
    # model_name[None[:3]], which raises TypeError at runtime (None is not
    # subscriptable); the intent is clearly a prefix slice.
    model = self.mobiuspi.model_name
    is_ec_model = model[:2].startswith("EC") or model[:3].startswith("HEC")
    if is_ec_model:
        url = "http://127.0.0.1:9100/api/v1/dsa_config_update"
        data = dict()
        data["device_supervisor"] = self.diff_dict(self.cfg_schema.simple_config_old["device_supervisor"], self.cfg_schema.simple_config["device_supervisor"])
        self.http_put_dsa_config_update(url, data)
        # Baseline for the next diff; NOTE(review): placement inside the EC
        # branch reconstructed from the decompiled layout — confirm.
        self.cfg_schema.simple_config_old = copy.deepcopy(self.cfg_schema.simple_config)
    logger.debug("Thread update config done")
    self.update_config_finsh = False
    if is_ec_model:
        if self.web_handler.ec_need_restart_container:
            logger.info("Storage path changed! need restart container")
            os.system("kill -2 `ps | grep '/usr/bin/supervisord' | awk 'NR==1' | awk '{print $1}'`")

def web_update_config_timer_handler(self, evt=None, userdata=None):
    """Debounce timer fired: apply the accumulated config changes on a worker thread."""
    logger.debug("Start update config thread...")
    self.update_config_thread = threading.Thread(target=(self.work))
    self.update_config_thread.start()

def on_cfg_editing(self, wait_time=8):
    """Config changed via web: bump the south version and (re)arm the debounce timer."""
    logger.debug("Config have been update")
    self.erlang.generate_south_version()
    self.web_update_config_timer.delete()
    self.web_update_config_timer.add(wait_time)

def _build_debuglog_files_list(self):
    """Collect the debug-log file names (plus their ".1" rotations) that the
    currently configured drivers may legitimately write."""
    debuglog_files = []
    for service in self.service_manager.services:
        if not isinstance(service, DriverService):
            continue
        for ctrlName in service.controller_names:
            if service.driverkey == "DriverAgent":
                # DriverAgent writes one log per controller name.
                debug_log_file = "%s.log" % ctrlName
                debuglog_files.append(debug_log_file)
                debuglog_files.append(debug_log_file + ".1")
            else:
                controller = self.web_handler.find_ctl_name(ctrlName)
                if controller is False:
                    logger.warn("controller(%s) not found" % ctrlName)
                    continue
                # Other drivers log per protocol+service id, so the first
                # resolvable controller is enough.
                # NOTE(review): break placement reconstructed from the
                # decompiled layout — confirm against the full file.
                debug_log_file = "%s_%s.log" % (controller["protocol"], service.service_id)
                debuglog_files.append(debug_log_file)
                debuglog_files.append(debug_log_file + ".1")
                break
    return debuglog_files

def clear_invalid_debuglog(self, path):
    """Delete files under *path* that do not correspond to any configured driver's debug log."""
    if not os.path.exists(path):
        return
    keep = self._build_debuglog_files_list()
    for filename in os.listdir(path):
        filepath = path + "/" + filename
        if not os.path.isfile(filepath):
            continue
        if filename not in keep:
            os.remove(filepath)
            logger.info("clear invaild debugLog:%s" % filepath)
def init(self):
    """Late construction of collaborators that need a fully built Master."""
    self.erlang = Erlang(self)
    self.web_handler = Web(self)
    self.service_manager = ServiceManager(self.cfg, self.cfg_schema, self.base, self.mqclient, self.mobiuspi, self.erlang.send_connect_info_to_erlang_agent)

def stop_service(self, service_id=0):
    """Stop the service whose id equals *service_id*; service_id 0 is a no-op."""
    for service in self.service_manager.services:
        if service_id and service.service_id == service_id:
            service.stop()

def start_service(self, service_id=0):
    """Regenerate config for and start the service with *service_id*; 0 is a no-op."""
    for service in self.service_manager.services:
        if service_id and service.service_id == service_id:
            service.generate_config_file()
            service.start()

def _generate_zlog_conf(self):
    """Render zlog.conf for the C services (atomically via tmp file + rename).

    zlog has no "WARNING" level, so the configured level is mapped to "WARN".
    """
    log_level = self.cfg.ucfg.misc["logLvl"].upper()
    if log_level == "WARNING":
        log_level = "WARN"
    zlog_config = service_c_zlog_config.format(LOG_LEVEL=log_level)
    zlog_config_file = self.cfg.app_path + "/zlog.conf"
    with open(zlog_config_file + ".tmp", "w", encoding="utf-8") as f:
        f.write(zlog_config)
    os.rename(zlog_config_file + ".tmp", zlog_config_file)

def _restart_sparkplugb_mqtt_agents(self):
    """Force-restart the MQTT agent service of every enabled SparkPlugB cloud.

    Extracted from start_subprocess_by_type, where the decompiled original
    carried two identical copies of this loop.
    """
    for cloud in self.cfg.ucfg.clouds:
        if cloud["type"] == CloudTypeSparkPlugB and cloud["enable"] == 1:
            for mqttAgentService in self.service_manager.services:
                if mqttAgentService.name == cloud["name"]:
                    mqttAgentService.be_modified = True
                    mqttAgentService.restart()
                    break

def start_subprocess_by_type(self, service_type):
    """Generate config for every service of *service_type*, then start,
    restart or stop it according to its modified state.

    NOTE(review): the nesting below is reconstructed from the collapsed
    decompiler output; the be_modified handling is placed as a sibling of the
    DataHub auto-start (controller_names in the else branch implies it also
    covers non-DataHub services) — confirm against the full file.
    """
    for service in self.service_manager.services:
        if not isinstance(service, service_type):
            continue
        service.generate_config_file()
        # DataHub is auto-started if its subprocess is not running yet.
        if isinstance(service, DataHubService):
            if service.popen is None:
                logger.info("Start subprocess name:%s, service_id:%s" % (service.name, service.service_id))
                service.start()
        if not service.be_modified:
            continue
        if service.service_id < 2000:
            # Northbound/singleton services restart unconditionally.
            logger.info("Restart subprocess name:%s, service_id:%s" % (service.name, service.service_id))
            service.restart()
            if service.name == "QuickFaaS":
                self._restart_sparkplugb_mqtt_agents()
        else:
            # Driver services: restart only if at least one of their
            # controllers is still enabled, otherwise stop them.
            restart_service = 0
            for controller_name in service.controller_names:
                for controller in self.cfg.ucfg.controllers:
                    if controller["name"] == controller_name and controller["enable"] == 1:
                        restart_service = 1
            if restart_service:
                logger.info("Restart subprocess name:%s, service_id:%s" % (service.name, service.service_id))
                service.restart()
                self._restart_sparkplugb_mqtt_agents()
            else:
                logger.info("Stop subprocess name:%s, service_id:%s" % (service.name, service.service_id))
                service.stop()

def start_subprocess(self):
    """Regenerate zlog.conf, then drive every service type in dependency order
    (mosquitto message bus first, field-bus drivers last)."""
    self._generate_zlog_conf()
    for service_type in (
        MosquittoService,
        DataHubService,
        QuickFaaSService,
        MindSpherePutService,
        ModbusTCPSlaveService,
        ModbusRTUSlaveService,
        OpcuaServerService,
        IEC104ClientService,
        IEC104SlaveService,
        IEC101SlaveService,
        Sl651SlaveService,
        HJ212ClientService,
        BacnetServerService,
        Dnp3ServerService,
        iec61850ServerService,
        snmpAgentService,
        GenericMQTTAgentService,
        DriverService,
    ):
        self.start_subprocess_by_type(service_type)

def start_process(self):
    """Initial startup: build driver services, prune stale debug logs, start
    all subprocesses and arm the keepalive/ping timers. Failures are logged,
    never propagated."""
    try:
        self.add_driver_services()
        self.clear_invalid_debuglog(self.cfg.ucfg.misc["debugLogPath"])
        self.start_subprocess()
        self.service_manager.keepalive_timer.add(self.service_manager.keepalive_time)
        self.service_manager.ping_timer.add(self.service_manager.ping_time)
    except Exception as error:
        # The original's try/finally with "error = None; del error" is a
        # decompiler artifact of Python 3's exception-variable cleanup;
        # plain logging is behaviorally equivalent.
        logger.error("Start subprocess failed(%s)" % error)

def run(self):
    """Main entry: start everything, run the event loop, then tear down a
    still-running config-update thread on exit."""
    self.start_process()
    self.base.loop()
    if self.update_config_finsh:
        if self.update_config_thread.is_alive():
            logger.info("update config thread is doing, stop it")
            self.stop_python_thread(self.update_config_thread)
        self.update_config_finsh = False


if __name__ == "__main__":
    app = Master("InHand", "DS2")
    app.init()
    app.run()

# ---------------------------------------------------------------------------
# Non-Python residue from the enclosing git patch, preserved verbatim as
# comments (patch metadata for Services.py plus the head of Web.py; the final
# import statement continues on the next patch line):
#   diff --git a/APPS_UNCOMPILED/src/master/Services.py b/APPS_UNCOMPILED/src/master/Services.py
#   new file mode 100644
#   index 0000000..a0e31df
#   --- /dev/null
#   +++ b/APPS_UNCOMPILED/src/master/Services.py
#   @@ -0,0 +1,7 @@
#   +# uncompyle6 version 3.9.2
#   +# Python bytecode version base 3.7.0 (3394)
#   +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52)
#   +# [Clang 14.0.6 ]
#   +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/master/Services.py
#   +# Compiled at: 2024-04-18 03:12:55
#   +# Size of source mod 2**32: 72493 bytes
#   diff --git a/APPS_UNCOMPILED/src/master/Web.py b/APPS_UNCOMPILED/src/master/Web.py
#   new file mode 100644
#   index 0000000..f54e99f
#   --- /dev/null
#   +++ b/APPS_UNCOMPILED/src/master/Web.py
#   @@ -0,0 +1,5628 @@
#   +# uncompyle6 version 3.9.2
#   +# Python bytecode version base 3.7.0 (3394)
#   +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52)
#   +# [Clang 14.0.6 ]
#   +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/master/Web.py
#   +# Compiled at: 2024-04-18 03:12:55
#   +# Size of source mod 2**32: 341725 bytes
#   +import os
#   +from pickle import NONE
#   +import re, csv, json, copy, types, string, shutil, time, base64, requests, threading, collections
#   +from common.Logger import logger
#   +from common.Error import RequestError
#   +import common.Utilities as Utilities
#   +from common.Constant import WebRequest
#   +from common.MQClient import MqttSetting
#   +from common.MobiusAPI import ConfigAPI
#   +from common.Permission import RW, WO, RO
#   +from common.SendType
import Periodic, OnChange, Never +import common.ConfigParser as ConfigParser +from common.CloudType import StandardMQTT, InspurYunzhou, AWSIoT, AliyunIoT, AzureIoT, HeilanV1_0, HeilanV2_0, SparkPlugB +from common.CloudType import Erlang as CloudTypeErlang +from common.CloudType import BaiYing as CloudTypeBaiYing +from master.Services import gl_cloud_serviceId_dict +from master.ConfigSchema import simple_config_map_array +from master.ConfigSchema import simple_config_generate_array_key +from master.ConfigSchema import simple_config_save_user_config +from master.ConfigMerge import merge_patch_rfc7396_save_null, merge_patch_rfc7396 +from common.DataType import Bool, Bit, Byte, Sint, Word, Int, Dword, Dint, Float, String, Bcd, Ulong, Long, Double, Bcd32 +from common.InternalPath import CLOUD_MEASURE_CSV_FILE_NAME, CONFIG_TMP_PATH, MEASURE_CSV_FILE_NAME, GROUP_CSV_FILE_NAME, ALARM_CSV_FILE_NAME, CLOUD_CSV_FILE_NAME, SLAVE_CSV_FILE_NAME, IEC101_CSV_FILE_NAME, OPCUA_PUB_SUB_GROUP_CSV_FILE_NAME, SL651_CSV_FILE_NAME, IEC104_CSV_FILE_NAME, LABELS_CSV_FILE_NAME, GENFUNS_CSV_FILE_NAME, ALARMLABLES_CSV_FILE_NAME, HJ212_CSV_FILE_NAME, BACNET_CSV_FILE_NAME, DNP3_CSV_FILE_NAME, IEC61850_CSV_FILE_NAME, SNMP_CSV_FILE_NAME +import common.InternalTopic as InternalTopic +from common.Protocol import ABCC, ABMC, ABSLC, BFADS, DLT698, FJSPB, GESRPT, KEMC, MC3E_UDP, MCFL, MCR, OMCCN, OMCN, OMHLC, PANMC, SIFW, XJIN, YKGWLT, DLT645_OverTcp, DTAscii, DTSerial, DTSerial_OverTcp, DTTcp, FJSPB_OverTcp, FJSPHNet, IV_OverTcp, IV_TcpNet, IVSerial, KENano, KENano_OverTcp, MCFL_OverTcp, MCFS_OverTcp, MbRtu, MbTcp, MbAscii, MbRtuOverTcp, OMHL_OverTcp, OMHLC_OverTcp, OpcUa, OpcDa, PANMEW_OverTcp, SI_WebApi, SIPPI_OverTcp, Snap7, Eip, MC1C, MC3C, MC4C, MC1E, MC3E, MCPS, MC3COT, VRCON, OMFT, OMFU, OMHLS, SIPPI, EipPCCC, DLT645_2007, DLT645_1997, PANMEW, TANCYV13, IEC101, IEC104, IEC103, TATEKPROGRAM, TATEKPROGRAM_OverTcp, Easycom, VGSerial, VGSerial_OverTcp, XJSerial, XJSerial_OverTcp, XJTcpNet, 
EasyEthernet, CNCFS, BacBip, BacMstp, Euromap63, DNP3Tcp, DNP3Udp, DNP3RTU, RobotEfort, RobotAbb, RobotFanuc, Iec61850_MMS, KeBaSocket, CJT188_2004, MbRtuSlave, Hj212_Serial, Hj2122005_Serial, OpcUa_PubSub +from common.AlarmPolicy import AlarmEqual, AlarmNotEqual, AlarmGreater, AlarmLess, AlarmGreaterEqual, AlarmLessEqual, AlarmAnd, AlarmOr, AlarmNone +WEB_REQUEST_PREFIX = "httpreq" +WEB_RESPONSE_PREFIX = "httprsp" +GET_CHANNEL_TOPIC = WEB_REQUEST_PREFIX + "/v1/api/dsconfig/get/#" +PUT_CHANNEL_TOPIC = WEB_REQUEST_PREFIX + "/v1/api/dsconfig/put/#" +CLOUD_STATUS = WEB_REQUEST_PREFIX + "/v1/apps/device/supervisor2/cloud/status/#" +IEC104CLIENT_STATUS = WEB_REQUEST_PREFIX + "/v1/apps/device/supervisor2/iec104/client/status/#" +ERLANG_BIND_INFO = WEB_REQUEST_PREFIX + "/v1/apps/device/supervisor2/" + "erlang/bind/info" + "/#" +ERLANG_BIND_GROUP_INFO = WEB_REQUEST_PREFIX + "/v1/apps/device/supervisor2/" + "erlang/bind/group" + "/#" +CLEAR_OFFLINE_DATA_PUT = WEB_REQUEST_PREFIX + "/v1/apps/device/supervisor2/" + "empty/offlinedata/put" + "/#" +GET_AB_LOGIN_CONFIG = WEB_REQUEST_PREFIX + "/v1/apps/device/supervisor2/ab/login/config/get/#" +GET_AB_CLOULD_CONFIG = WEB_REQUEST_PREFIX + "/v1/apps/device/supervisor2/ab/clould/config/put/#" +GET_AB_DEVICE_CONFIG = WEB_REQUEST_PREFIX + "/v1/apps/device/supervisor2/ab/device/config/get/#" +UPDATA_AB_DEVICE_CONFIG = WEB_REQUEST_PREFIX + "/v1/apps/device/supervisor2/ab/device/config/put/#" +NORTH_BASIC_STATUS = WEB_REQUEST_PREFIX + "/v1/apps/device/supervisor2/north/basic/status/#" +DSA_SHOW_LOG = WEB_REQUEST_PREFIX + "/v1/apps/device/supervisor2/show/log/get/#" +DSA_LOG_ACTION = WEB_REQUEST_PREFIX + "/v1/apps/device/supervisor2/log/action/get/#" +DSA_EXPORT_LOG = WEB_REQUEST_PREFIX + "/v1/apps/device/supervisor2/export/log/get/#" +DSA_EXPORT_LOG_STATUS = WEB_REQUEST_PREFIX + "/v1/apps/device/supervisor2/export/log/status/get/#" +DSA_CONFIG_ACTION = WEB_REQUEST_PREFIX + "/v1/apps/device/supervisor2/config/action/post/#" + +class Web: + 
    def __init__(self, master) -> None:
        """Web/config request handler for the device-supervisor master.

        Wires MQTT request topics to handler methods and inlines certificate
        file contents (base64) into the user configuration at startup.

        :param master: owning Master instance; supplies the MQTT client,
            service manager, config schema, config object and user config.
        """
        self.mqchannel = master.mqclient
        self.servers = master.service_manager
        self.erlang = master.erlang
        self.schema = master.cfg_schema
        self.on_cfg_editing = master.on_cfg_editing
        self.cfg = master.cfg
        self.master = master
        self.names = list()
        self.ucfg = self.cfg.ucfg
        self.payload_transfer = Utilities.payload_transfer
        self.cfg_api = ConfigAPI()
        self._register_channel()
        self.conf_parser = ConfigParser()
        # Error-code lookup tables shared with common.Error.RequestError.
        self.ERROR_KEYS = RequestError.ERROR_KEYS
        self.ERROR_VALUES_DICT = RequestError.ERROR_VALUES_DICT
        self.ERROR_KEYS_WITH_PARAMS = RequestError.ERROR_KEYS_WITH_PARAMS
        self.ERROR_VALUES_DICT_WITH_PARAMS = RequestError.ERROR_VALUES_DICT_WITH_PARAMS
        # Inline OPC-UA server / cloud certificate files as base64 content.
        self.update_opcuaserver_cert_by_content(self.ucfg.opcuaServer)
        for cloud in self.ucfg.clouds:
            self.update_cert_by_content(cloud)

        # Only OPC-UA controllers using certificate auth carry cert files.
        for controller in self.ucfg.controllers:
            if controller["protocol"] == "OPC-UA" and controller["args"]["auth"] == "certificate":
                self.update_opcuaDriver_cert_by_content(controller)

        self.thing_name = ""
        self.export_log_thread = None    # background log-export worker, if any
        self.export_log_status = dict()  # progress/state reported to the UI
        # Set when an EC/HEC storage-path change requires a container restart.
        self.ec_need_restart_container = False

    def _register_channel(self) -> None:
        """Subscribe every web/API request topic to its handler method."""
        self.mqchannel.add_sub(GET_CHANNEL_TOPIC, self.on_cfg_get)
        self.mqchannel.add_sub(PUT_CHANNEL_TOPIC, self.on_cfg_put)
        self.mqchannel.add_sub(WebRequest().IMPORT_CONFIG, self.on_import_file)
        self.mqchannel.add_sub(WebRequest().EXPORT_CONFIG, self.on_export_file)
        self.mqchannel.add_sub(CLOUD_STATUS, self.cloud_status)
        self.mqchannel.add_sub(IEC104CLIENT_STATUS, self.iec104client_status)
        self.mqchannel.add_sub(NORTH_BASIC_STATUS, self.north_basic_status)
        self.mqchannel.add_sub(ERLANG_BIND_INFO, self.on_erlang_bind_info)
        self.mqchannel.add_sub(ERLANG_BIND_GROUP_INFO, self.on_erlang_bind_group_info)
        self.mqchannel.add_sub(CLEAR_OFFLINE_DATA_PUT, self.clear_offline_data)
        self.mqchannel.add_sub(GET_AB_LOGIN_CONFIG, self.get_ab_login_config)
        self.mqchannel.add_sub(GET_AB_CLOULD_CONFIG, self.get_ab_cloud_config)
        self.mqchannel.add_sub(GET_AB_DEVICE_CONFIG, self.get_ab_device_config)
        self.mqchannel.add_sub(UPDATA_AB_DEVICE_CONFIG, self.update_ab_device_config)
        self.mqchannel.add_sub(InternalTopic.EVENT_BUS_SYSTEM_ALIYUN_UPDATE_CONFIG, self.aliyun_update_config)
        self.mqchannel.add_sub(InternalTopic.EVENT_BUS_SYSTEM_GROUP_UPDATE_CONFIG, self.group_update_config)
        self.mqchannel.add_sub(InternalTopic.EVENT_BUS_SYSTEM_GET_CONFIG, self.get_config)
        self.mqchannel.add_sub(InternalTopic.EVENT_BUS_SYSTEM_UPDATE_CONFIG, self.update_config)
        self.mqchannel.add_sub(DSA_SHOW_LOG, self.show_log_callback)
        self.mqchannel.add_sub(DSA_LOG_ACTION, self.log_action_callback)
        self.mqchannel.add_sub(DSA_EXPORT_LOG, self.export_log_callback)
        self.mqchannel.add_sub(DSA_EXPORT_LOG_STATUS, self.export_log_status_callback)
        self.mqchannel.add_sub(DSA_CONFIG_ACTION, self.config_action_callback)

    def on_cfg_get(self, topic, payload):
        """Handle a GET config request: publish the requested schema fields.

        :param topic: request topic; the response topic is derived from it.
        :param payload: dict (or JSON) with a comma-separated "fields" key.
        :return: None; the response is published via build_response_data.
        """
        try:
            logger.debug("master receive topic: %s , payload: %s" % (topic, payload))
            payload = Utilities.payload_transfer(payload)
            fields = payload["fields"].split(",")
            res = self.schema.get_fields(fields)
            data = self.wrap_response_data(res)
            self.build_response_data(data, topic)
            return
        except Exception as e:
            try:
                logger.warn("Response error: %s" % e)
            finally:
                # Decompiler artifact of Python 3 "except ... as e" cleanup.
                e = None
                del e

        # Reached only on failure: publish a generic "failed" response.
        data = self.wrap_response_data("failed")
        self.build_response_data(data, topic)

    def on_cfg_put(self, topic, payload):
        """Handle a PUT config request: validate, clean up, merge and apply.

        Rejects the request with error -11024 while a previous config-update
        thread is still running; otherwise validates QuickFaaS scripts,
        synchronizes dependent config sections and merges the patch.
        """
        try:
            if self.master.update_config_finsh:
                if self.master.update_config_thread.is_alive():
                    # A previous config update is still being applied: busy.
                    data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11024]))
                    return self.build_response_data(data, topic)
            # NOTE(review): decompiled control flow -- the else/if pairing and
            # the "return ... or ..." below are uncompyle6 artifacts; as
            # written, everything after that return is unreachable. The
            # original was probably
            # "if not self.schema.put(payload): return self.pub_invalid_response_by_params(topic, -1003)".
            # Confirm against the deployed bytecode before changing.
            else:
                logger.debug("master receive topic: %s , payload: %s" % (topic, payload))
                if isinstance(payload, (str, bytes)):
                    payload = json.loads(payload)
                return self.schema.put(payload) or self.pub_invalid_response_by_params(topic, -1003)
                result = self.verify_parameter(payload["device_supervisor"])
                if result:
                    logger.error("verify parameter error, %s" % result)
                    return self.build_response_data(result, topic)
                self.schema.simple_config_old = copy.deepcopy(self.schema.simple_config)
                self.clear_invaild_config(payload["device_supervisor"])
                self.schema.merge_patch(payload)
                self.on_cfg_editing()
                logger.set_level(level=(self.schema.simple_config["device_supervisor"]["misc"]["logLvl"]))
                if self.master.mobiuspi.model_name in ('EC942', 'EC954'):
                    self.cfg.set_coms_mode(self.schema.simple_config["device_supervisor"]["misc"]["coms"], self.master.mobiuspi.model_name)
                self.build_response_data(self.wrap_response_data("ok"), topic)
        except Exception as error:
            try:
                logger.error("PUT request handle failed(%s)" % error)
                self.pub_invalid_response_by_params(topic, -1003)
            finally:
                error = None
                del error

    def verify_parameter(self, payload):
        """Validate PUT payload sections; return an error dict or None.

        Currently only the "quickfaas" section (user scripts) is checked.
        """
        for key in payload.keys():
            if key == "quickfaas":
                return self.verify_script(payload["quickfaas"])

    def verify_script(self, quickfaas):
        """Syntax-check QuickFaaS functions that are new or changed.

        Functions already present verbatim in the stored schema are skipped.
        Returns the error dict from verify_script_validity on the first
        failure, None otherwise.
        """
        schmea_quickfaas = self.schema.simple_config["device_supervisor"]["quickfaas"]
        if "uploadFuncs" in quickfaas:
            for cloud_name, quick in quickfaas["uploadFuncs"].items():
                if quick:
                    for x in quick:
                        if cloud_name in schmea_quickfaas["uploadFuncs"]:
                            if x not in schmea_quickfaas["uploadFuncs"][cloud_name]:
                                return self.verify_script_validity(x["funcName"], x["script"])
                        else:
                            return self.verify_script_validity(x["funcName"], x["script"])

        else:
            if "downloadFuncs" in quickfaas:
                for cloud_name, quick in quickfaas["downloadFuncs"].items():
                    if quick:
                        for x in quick:
                            if cloud_name in schmea_quickfaas["downloadFuncs"]:
                                if x not in schmea_quickfaas["downloadFuncs"][cloud_name]:
                                    return self.verify_script_validity(x["funcName"], x["script"])
                            else:
                                return self.verify_script_validity(x["funcName"], x["script"])

            else:
                if "genericFuncs" in quickfaas:
                    for item in [x for x in quickfaas["genericFuncs"] if x not in schmea_quickfaas["genericFuncs"]]:
                        return self.verify_script_validity(item["funcName"], item["script"])

    def verify_script_validity(self, function_name, script):
        """Exec *script* in a scratch module and check *function_name* exists.

        Returns an error dict (code -11012 with the exception text in
        "params") on failure, None on success.

        SECURITY NOTE(review): exec/eval on operator-supplied script text
        runs arbitrary code in-process; acceptable only because QuickFaaS
        scripts are by design authored by the device operator.
        """
        try:
            mymodule = types.ModuleType("mymodule")
            exec(script, mymodule.__dict__)
            eval("mymodule.%s" % function_name)
        except Exception as error:
            try:
                data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11012]))
                data["params"] = str(error)
                return data
            finally:
                error = None
                del error

    def sync_opcuaServer_file_to_content(self, opcuaServer, opcuaServer_new):
        """Sync OPC-UA server cert/key files into base64 "*_content" fields.

        Also handles pub/sub group deletions and clears the node mapping
        table when pub/sub mode is being switched on.
        """
        if "pubsub_group" in opcuaServer:
            self.delete_opcua_pubsub_group_to_syn_config(opcuaServer["pubsub_group"])
        else:
            schemaOpcuaServer = self.schema.simple_config["device_supervisor"]["opcuaServer"]
            if "config" in opcuaServer:
                if opcuaServer["config"]["pubsub"] == 1:
                    if self.schema.simple_config["device_supervisor"]["opcuaServer"]["config"]["pubsub"] == 0:
                        # Switching to pub/sub: existing node mappings are invalid.
                        for key, value in list(schemaOpcuaServer["mapping_table"].items()):
                            schemaOpcuaServer["mapping_table"].pop(key)

        if "config" not in opcuaServer:
            return
        # NOTE(review): the membership tests check opcuaServer_new top level
        # but the values are read from opcuaServer_new["config"] -- looks like
        # a decompile/merge quirk; confirm against the original source.
        if "certificate" in opcuaServer_new and os.path.exists(opcuaServer_new["config"]["certificate"]):
            with open(opcuaServer_new["config"]["certificate"], "rb") as content:
                opcuaServer_new["config"]["certificate_content"] = str(base64.b64encode(content.read()), "utf-8")
        else:
            opcuaServer_new["config"]["certificate"] = ""
            opcuaServer_new["config"]["certificate_content"] = ""
        if "privateKey" in opcuaServer_new and os.path.exists(opcuaServer_new["config"]["privateKey"]):
            with open(opcuaServer_new["config"]["privateKey"], "rb") as content:
                opcuaServer_new["config"]["privateKey_content"] = str(base64.b64encode(content.read()), "utf-8")
        else:
            opcuaServer_new["config"]["privateKey"] = ""
            opcuaServer_new["config"]["privateKey_content"] = ""
sync_iec61850Server_config(self, iec61850Server): + if "config" not in iec61850Server: + return + schemaiec61850Server = self.schema.simple_config["device_supervisor"]["iec61850Server"] + newLDRef = iec61850Server["config"]["iedName"] + iec61850Server["config"]["LDName"] + for key, value in list(schemaiec61850Server["mapping_table"].items()): + LDRef = value["daRef"].split("/")[0] + other = value["daRef"].split("/")[1] + if LDRef != newLDRef: + value["daRef"] = newLDRef + "/" + other + + def clear_invaild_config(self, payload): + for key in payload.keys(): + if key in self.schema.simple_config["device_supervisor"]: + temp_config = copy.deepcopy(self.schema.simple_config["device_supervisor"][key]) + temp_config = merge_patch_rfc7396_save_null(temp_config, payload[key]) + if key == "controllers": + self.add_controller_to_init_measure_list(temp_config, payload[key]) + elif key == "measures": + self.delete_measures_to_syn_config(temp_config) + elif key == "groups": + self.delete_groups_to_syn_config(temp_config) + elif key == "alarms": + self.delete_alarms_to_syn_config(temp_config) + elif key == "alarmLables": + self.delete_alarmLables_to_syn_config(temp_config) + elif key == "hj212Client": + self.delete_and_change_hj212_block_to_syn_config(temp_config) + elif key == "iec104Server": + self.delete_104_asdu_to_syn_config(temp_config) + elif key == "iec101Server": + self.delete_101_asdu_to_syn_config(temp_config) + elif key == "opcuaServer": + self.sync_opcuaServer_file_to_content(temp_config, payload[key]) + elif key == "iec61850Server": + self.sync_iec61850Server_config(temp_config) + elif key == "clouds": + self.switch_cloud_to_syn_config(temp_config, payload[key]) + elif key == "misc": + self.cloud_cache_to_syn_config(temp_config, payload[key]) + elif key == "labels": + self.labels_to_syn_config(temp_config, payload[key]) + + def cloud_cache_to_syn_config(self, misc, new_misc): + if "cachePath" in misc: + old_cachePath = 
self.schema.simple_config["device_supervisor"]["misc"]["cachePath"] + if misc["cachePath"] != old_cachePath: + os.system("rm -fr %s" % old_cachePath) + if misc["cacheMode"] == "gateway" or self.cfg.is_storage_exist(misc["cachePath"]): + os.system("mkdir -p %s" % misc["cachePath"]) + else: + logger.warn("Offline data storage path(%s) nonexistence" % misc["cachePath"]) + if not self.master.mobiuspi.model_name[None[:2]].startswith("EC"): + if self.master.mobiuspi.model_name[None[:3]].startswith("HEC"): + self.ec_need_restart_container = True + elif "debugLogPath" in misc: + old_debugLogPath = self.schema.simple_config["device_supervisor"]["misc"]["debugLogPath"] + if misc["debugLogPath"] != old_debugLogPath: + os.system("rm -fr %s" % old_debugLogPath) + if misc["debugLogMode"] == "gateway" or self.cfg.is_storage_exist(misc["debugLogPath"]): + os.system("mkdir -p %s" % misc["debugLogPath"]) + else: + logger.warn("Communication message storage path(%s) nonexistence" % misc["debugLogPath"]) + if self.master.mobiuspi.model_name[None[:2]].startswith("EC") or self.master.mobiuspi.model_name[None[:3]].startswith("HEC"): + self.ec_need_restart_container = True + if "coms" in misc: + old_coms = self.schema.simple_config["device_supervisor"]["misc"]["coms"] + self.schema.simple_config_old = merge_patch_rfc7396(self.schema.simple_config_old, new_misc) + new_coms = list() + for old_com in old_coms: + have_com = 0 + for new_com in misc["coms"]: + if old_com["name"] == new_com["name"]: + if self.master.mobiuspi.model_name in ('EC942', 'EC954'): + if "mode" not in new_com: + new_com["mode"] = "rs485" + new_coms.append(new_com) + have_com = 1 + break + + if have_com == 0: + new_coms.append(old_com) + + new_misc["coms"] = new_coms + + def sync_opcuaDriver_file_to_content(self, con_val=None): + if "certificate" in con_val["args"] and os.path.exists(con_val["args"]["certificate"]): + with open(con_val["args"]["certificate"], "rb") as content: + con_val["args"]["certificate_content"] = 
str(base64.b64encode(content.read()), "utf-8") + else: + con_val["args"]["certificate"] = "" + con_val["args"]["certificate_content"] = "" + if "privateKey" in con_val["args"] and os.path.exists(con_val["args"]["privateKey"]): + with open(con_val["args"]["privateKey"], "rb") as content: + con_val["args"]["privateKey_content"] = str(base64.b64encode(content.read()), "utf-8") + else: + con_val["args"]["privateKey"] = "" + con_val["args"]["privateKey_content"] = "" + + def sync_BACnetMSTP_clientNodeAddr(self, schema_conId, con_val): + for old_conId in schema_conId: + old_con = self.schema.simple_config["device_supervisor"]["controllers"][old_conId] + if old_con["protocol"] == "BACnet/MSTP" and old_con["endpoint"] == con_val["endpoint"] and old_con["args"]["clientNodeAddr"] != con_val["args"]["clientNodeAddr"]: + old_con["args"]["clientNodeAddr"] = con_val["args"]["clientNodeAddr"] + + def add_controller_to_init_measure_list(self, controllers, controllers_new): + schema_conId = list(self.schema.simple_config["device_supervisor"]["controllers"].keys()) + for con_id, con_val in controllers.items(): + if con_val and con_id not in schema_conId: + self.schema.simple_config["device_supervisor"]["measures"][con_val["name"]] = collections.OrderedDict() + if con_val["protocol"] == "BACnet/MSTP": + self.sync_BACnetMSTP_clientNodeAddr(schema_conId, con_val) + else: + if con_val["protocol"] == "OPC-UA": + if con_val["args"]["auth"] == "certificate": + if con_id in controllers_new: + self.sync_opcuaDriver_file_to_content(con_val=(controllers_new[con_id])) + else: + if con_val: + if con_id in schema_conId: + old_ctrlName = self.schema.simple_config["device_supervisor"]["controllers"][con_id]["name"] + logger.debug("modify_ctrlName_sync_config con name %s" % str(old_ctrlName)) + self.modify_ctrlName_sync_config(con_val["name"], old_ctrlName) + if con_val["name"] != old_ctrlName: + self.schema.simple_config["device_supervisor"]["measures"][con_val["name"]] = 
self.schema.simple_config["device_supervisor"]["measures"][old_ctrlName] + self.schema.simple_config["device_supervisor"]["measures"].pop(old_ctrlName) + if con_val["protocol"] == "BACnet/MSTP": + self.sync_BACnetMSTP_clientNodeAddr(schema_conId, con_val) + if con_val["protocol"] == "OPC-UA" and con_val["args"]["auth"] == "certificate" and con_id in controllers_new: + self.sync_opcuaDriver_file_to_content(con_val=(controllers_new[con_id])) + + def delete_measures_to_syn_config(self, measures): + for ctl_name, ctl_measures in measures.items(): + if ctl_name not in self.schema.simple_config["device_supervisor"]["measures"]: + continue + mea_list = self.schema.simple_config["device_supervisor"]["measures"][ctl_name] + if ctl_measures: + for measure_id, measure_value in ctl_measures.items(): + if measure_value is None: + self.clear_invaild_config_by_measure(mea_list[measure_id]) + elif measure_id not in mea_list: + continue + if "ctrlName" in measure_value and "name" in measure_value: + old_meaName = mea_list[measure_id]["name"] + self.modify_measure_params_sync_config(old_meaName, measure_value) + + else: + for measure_id, measure_value in mea_list.items(): + self.clear_invaild_config_by_measure(measure_value) + + def delete_groups_to_syn_config(self, groups): + group_name = list() + schema_grs = self.schema.simple_config["device_supervisor"]["groups"] + for group_id, group_value in groups.items(): + if group_value is None: + group_name.append(schema_grs[group_id]["name"]) + self.clear_measures_by_group(group_id) + elif group_id in schema_grs and schema_grs[group_id]["historyDataPath"] != group_value["historyDataPath"]: + os.system("rm -f %s" % (schema_grs[group_id]["historyDataPath"] + "/" + schema_grs[group_id]["name"] + ".zip.db")) + if group_value["historyDataMode"] == "gateway" or self.cfg.is_storage_exist(group_value["historyDataPath"]): + os.system("mkdir -p %s" % group_value["historyDataPath"]) + + def clear_measures_by_group(self, group_id): + group = 
self.schema.simple_config["device_supervisor"]["groups"][group_id] + schemaMeasures = self.schema.simple_config["device_supervisor"]["measures"] + for ctl_name, ctl_measures in list(schemaMeasures.items()): + for mea_id, mea in list(ctl_measures.items()): + if "group" in mea and mea["group"] == group["name"]: + schemaMeasures[ctl_name].pop(mea_id) + self.clear_invaild_config_by_measure(mea) + + def delete_opcua_pubsub_group_to_syn_config(self, groups): + for group_id, group_value in groups.items(): + if group_value is None: + self.clear_opcua_map_by_pubsub_group(group_id) + + def clear_opcua_map_by_pubsub_group(self, group_id): + group = self.schema.simple_config["device_supervisor"]["opcuaServer"]["pubsub_group"][group_id] + schemaOpcuaServer = self.schema.simple_config["device_supervisor"]["opcuaServer"]["mapping_table"] + for key, value in list(schemaOpcuaServer.items()): + if "pubGroupId" in value: + if group["group_type"] == "pub": + for id in value["pubGroupId"]: + if id == group["group_id"]: + schemaOpcuaServer.pop(key) + break + + if "subGroupId" in value and group["group_type"] == "sub": + for id in value["subGroupId"]: + if id == group["group_id"]: + schemaOpcuaServer.pop(key) + break + + def delete_uploadFuncs_group_list(self, upload_groups, del_groups): + for gr in del_groups: + if gr in upload_groups: + upload_groups.remove(gr) + + return upload_groups + + def clear_uploadFuncs_by_group(self, del_groups): + uploadFuncs = self.schema.simple_config["device_supervisor"]["quickfaas"]["uploadFuncs"] + for _, cloud_uploadFuncs in list(uploadFuncs.items()): + for item in list(cloud_uploadFuncs): + if item["trigger"] == "measure_event": + self.delete_uploadFuncs_group_list(item["groups"], del_groups) or cloud_uploadFuncs.remove(item) + + def delete_mindsphereputs_group_list(self, upload_groups, del_groups): + for gr in del_groups: + if gr in upload_groups: + upload_groups.remove(gr) + + return upload_groups + + def clear_mindsphereputs_by_group(self, 
del_groups): + mindspheres = self.schema.simple_config["device_supervisor"]["mindspheres"] + for _, mindsphere in list(mindspheres.items()): + for mindsphereput in mindsphere["mindsphereputs"]: + if not self.delete_mindsphereputs_group_list(mindsphereput["groups"], del_groups): + mindsphere["mindsphereputs"].remove(mindsphereput) + + def delete_alarms_to_syn_config(self, alarms): + for alarm_id, alarm_value in alarms.items(): + if alarm_value is None: + alarm_list = self.schema.simple_config["device_supervisor"]["alarms"][alarm_id] + self.clear_invaild_config_by_alarm(alarm_list) + + def delete_alarmLables_to_syn_config(self, alarmLables): + alarmLable_list = self.schema.simple_config["device_supervisor"]["alarmLables"] + alarmLable_dels = [x for x in alarmLable_list if x not in alarmLables] + if alarmLable_dels: + alarms = self.schema.simple_config["device_supervisor"]["alarms"] + for alarm_id, alarm_value in list(alarms.items()): + if alarm_value["alarmLable"] in alarmLable_dels: + alarms.pop(alarm_id) + self.clear_invaild_config_by_alarm(alarm_value) + + self.clear_invaild_config_by_alarmLable(alarmLable_dels) + + def clear_invaild_config_by_measure(self, mea): + """ + 删除一个测点,需要同步以下信息: + 1. alarms + 2. modbusSlave mapping_table + 3. modbusRTUSlave mapping_table + 4. iec104Server mapping_table + 5. iec101Server mapping_table + 6. opcuaServer mapping_table + 7. bindCfg variables + 8. bindCfg alerts + 9. iec104Client mapping_table + 10. cloud uploadRules + 11. 
mindsphereput uploadRules + """ + schemaAlarm = self.schema.simple_config["device_supervisor"]["alarms"] + for key, value in list(schemaAlarm.items()): + if mea["ctrlName"] == value["ctrlName"] and mea["name"] == value["measureName"]: + schemaAlarm.pop(key) + self.clear_invaild_config_by_alarm(value) + + schemaModbusSlave = self.schema.simple_config["device_supervisor"]["modbusSlave"]["mappingTable"] + for mappingTableKey, mappingTableValue in list(schemaModbusSlave.items()): + for key, value in list(mappingTableValue["measures"].items()): + if mea["ctrlName"] == value["ctrlName"] and mea["name"] == value["measureName"]: + mappingTableValue["measures"].pop(key) + + schemaModbusRTUSlave = self.schema.simple_config["device_supervisor"]["modbusRTUSlave"]["mappingTable"] + for mappingTableKey, mappingTableValue in list(schemaModbusRTUSlave.items()): + for key, value in list(mappingTableValue["measures"].items()): + if mea["ctrlName"] == value["ctrlName"] and mea["name"] == value["measureName"]: + mappingTableValue["measures"].pop(key) + + schemaIec104Server = self.schema.simple_config["device_supervisor"]["iec104Server"]["mapping_table"] + for key, value in list(schemaIec104Server.items()): + if mea["ctrlName"] == value["ctrlName"] and mea["name"] == value["measureName"]: + schemaIec104Server.pop(key) + + iec101Server_YX = self.schema.simple_config["device_supervisor"]["iec101Server"]["mappingTable"]["YX"] + for key, value in list(iec101Server_YX.items()): + if mea["ctrlName"] == value["ctrlName"] and mea["name"] == value["measureName"]: + iec101Server_YX.pop(key) + + iec101Server_YC = self.schema.simple_config["device_supervisor"]["iec101Server"]["mappingTable"]["YC"] + for key, value in list(iec101Server_YC.items()): + if mea["ctrlName"] == value["ctrlName"] and mea["name"] == value["measureName"]: + iec101Server_YC.pop(key) + + iec101Server_YK = self.schema.simple_config["device_supervisor"]["iec101Server"]["mappingTable"]["YK"] + for key, value in 
list(iec101Server_YK.items()): + if mea["ctrlName"] == value["ctrlName"] and mea["name"] == value["measureName"]: + iec101Server_YK.pop(key) + + schemaOpcuaServer = self.schema.simple_config["device_supervisor"]["opcuaServer"]["mapping_table"] + for key, value in list(schemaOpcuaServer.items()): + if mea["ctrlName"] == value["ctrlName"] and mea["name"] == value["measureName"]: + schemaOpcuaServer.pop(key) + + schemaSl651Slave = self.schema.simple_config["device_supervisor"]["sl651Slave"]["mapping_table"] + for key, value in list(schemaSl651Slave.items()): + if mea["ctrlName"] == value["ctrlName"] and mea["name"] == value["measureName"]: + schemaSl651Slave.pop(key) + + schemahj212Client = self.schema.simple_config["device_supervisor"]["hj212Client"]["mapping_table"] + for key, value in list(schemahj212Client.items()): + if mea["ctrlName"] == value["ctrlName"] and mea["name"] == value["measureName"]: + schemahj212Client.pop(key) + + schemaBacnetServer = self.schema.simple_config["device_supervisor"]["bacnetServer"]["mapping_table"] + for key, value in list(schemaBacnetServer.items()): + if mea["ctrlName"] == value["ctrlName"] and mea["name"] == value["measureName"]: + schemaBacnetServer.pop(key) + + schemaDnp3Server = self.schema.simple_config["device_supervisor"]["Dnp3Server"]["mapping_table"] + for key, value in list(schemaDnp3Server.items()): + if mea["ctrlName"] == value["ctrlName"] and mea["name"] == value["measureName"]: + schemaDnp3Server.pop(key) + + schemaiec61850Server = self.schema.simple_config["device_supervisor"]["iec61850Server"]["mapping_table"] + for key, value in list(schemaiec61850Server.items()): + if mea["ctrlName"] == value["ctrlName"] and mea["name"] == value["measureName"]: + schemaiec61850Server.pop(key) + + schemaSnmpAgentServer = self.schema.simple_config["device_supervisor"]["snmpAgent"]["mapping_table"] + for key, value in list(schemaSnmpAgentServer.items()): + if mea["ctrlName"] == value["ctrlName"] and mea["name"] == 
value["measureName"]: + schemaSnmpAgentServer.pop(key) + + bindVars = self.ucfg.bindConfig["variables"] + for var in bindVars: + if mea["ctrlName"] == var["ctrlName"] and mea["name"] == var["measureName"]: + self.ucfg.bindConfig["variables"].remove(var) + + iec104Client_YX = self.schema.simple_config["device_supervisor"]["iec104Client"]["mapping_table"]["YX"] + for key, value in list(iec104Client_YX.items()): + if mea["ctrlName"] == value["ctrlName"] and mea["name"] == value["measureName"]: + iec104Client_YX.pop(key) + + iec104Client_YC = self.schema.simple_config["device_supervisor"]["iec104Client"]["mapping_table"]["YC"] + for key, value in list(iec104Client_YC.items()): + if mea["ctrlName"] == value["ctrlName"] and mea["name"] == value["measureName"]: + iec104Client_YC.pop(key) + + iec104Client_YK = self.schema.simple_config["device_supervisor"]["iec104Client"]["mapping_table"]["YK"] + for key, value in list(iec104Client_YK.items()): + if mea["ctrlName"] == value["ctrlName"] and mea["name"] == value["measureName"]: + iec104Client_YK.pop(key) + + for i in self.schema.simple_config["device_supervisor"]["clouds"]: + if "uploadRules" in self.schema.simple_config["device_supervisor"]["clouds"][i]: + upload_rules = self.schema.simple_config["device_supervisor"]["clouds"][i]["uploadRules"] + for value in upload_rules: + if mea["ctrlName"] == value["ctrlName"] and mea["name"] == value["measureName"]: + upload_rules.remove(value) + + for i in self.schema.simple_config["device_supervisor"]["mindspheres"]: + for mindsphereput in self.schema.simple_config["device_supervisor"]["mindspheres"][i]["mindsphereputs"]: + if "uploadRules" in mindsphereput: + upload_rules = mindsphereput["uploadRules"] + for value in upload_rules: + if mea["ctrlName"] == value["ctrlName"] and mea["name"] == value["measureName"]: + upload_rules.remove(value) + + def modify_ctrlName_sync_config(self, new_ctrlName, old_ctrlName): + """ + 修改一个控制器的名称需要同步以下信息: + 1. measures + 2. alarms + 3. 
modbusSlave mapping_table + 4. modbusRTUSlave mapping_table + 5. iec104Server mapping_table + 6. iec101Server mapping_table + 7. opcuaServer mapping_table + 8. iec104Client mapping_table + 9. cloud uploadRules + 10. mindsphereput uploadRules + """ + schemaMeasure = self.schema.simple_config["device_supervisor"]["measures"][old_ctrlName] + for key, value in list(schemaMeasure.items()): + if value["ctrlName"] == old_ctrlName and old_ctrlName != new_ctrlName: + value["ctrlName"] = new_ctrlName + + schemaAlarm = self.schema.simple_config["device_supervisor"]["alarms"] + for key, value in list(schemaAlarm.items()): + if value["ctrlName"] == old_ctrlName and old_ctrlName != new_ctrlName: + value["ctrlName"] = new_ctrlName + + schemaModbusSlave = self.schema.simple_config["device_supervisor"]["modbusSlave"]["mappingTable"] + for mappingTableKey, mappingTableValue in list(schemaModbusSlave.items()): + for key, value in list(mappingTableValue["measures"].items()): + if value["ctrlName"] == old_ctrlName and old_ctrlName != new_ctrlName: + value["ctrlName"] = new_ctrlName + + schemaModbusRTUSlave = self.schema.simple_config["device_supervisor"]["modbusRTUSlave"]["mappingTable"] + for mappingTableKey, mappingTableValue in list(schemaModbusRTUSlave.items()): + for key, value in list(mappingTableValue["measures"].items()): + if value["ctrlName"] == old_ctrlName and old_ctrlName != new_ctrlName: + value["ctrlName"] = new_ctrlName + + schemaIec104Server = self.schema.simple_config["device_supervisor"]["iec104Server"]["mapping_table"] + for key, value in list(schemaIec104Server.items()): + if value["ctrlName"] == old_ctrlName and old_ctrlName != new_ctrlName: + value["ctrlName"] = new_ctrlName + + iec101Server_YX = self.schema.simple_config["device_supervisor"]["iec101Server"]["mappingTable"]["YX"] + for key, value in list(iec101Server_YX.items()): + if value["ctrlName"] == old_ctrlName and old_ctrlName != new_ctrlName: + value["ctrlName"] = new_ctrlName + + iec101Server_YC = 
self.schema.simple_config["device_supervisor"]["iec101Server"]["mappingTable"]["YC"] + for key, value in list(iec101Server_YC.items()): + if value["ctrlName"] == old_ctrlName and old_ctrlName != new_ctrlName: + value["ctrlName"] = new_ctrlName + + iec101Server_YK = self.schema.simple_config["device_supervisor"]["iec101Server"]["mappingTable"]["YK"] + for key, value in list(iec101Server_YK.items()): + if value["ctrlName"] == old_ctrlName and old_ctrlName != new_ctrlName: + value["ctrlName"] = new_ctrlName + + schemaOpcuaServer = self.schema.simple_config["device_supervisor"]["opcuaServer"]["mapping_table"] + for key, value in list(schemaOpcuaServer.items()): + if value["ctrlName"] == old_ctrlName and old_ctrlName != new_ctrlName: + value["ctrlName"] = new_ctrlName + if self.schema.simple_config["device_supervisor"]["opcuaServer"]["config"]["identifierType"] == "String": + ns = value["nodeId"].split(";")[0] + s = value["nodeId"].split(";")[1] + s_value = s.split("=")[1] + s_value_measure = s_value.split(".")[1] + value["nodeId"] = ns + ";s=" + new_ctrlName + "." 
+ s_value_measure + + schemaSl651Slave = self.schema.simple_config["device_supervisor"]["sl651Slave"]["mapping_table"] + for key, value in list(schemaSl651Slave.items()): + if value["ctrlName"] == old_ctrlName and old_ctrlName != new_ctrlName: + value["ctrlName"] = new_ctrlName + + schemahj212Client = self.schema.simple_config["device_supervisor"]["hj212Client"]["mapping_table"] + for key, value in list(schemahj212Client.items()): + if value["ctrlName"] == old_ctrlName and old_ctrlName != new_ctrlName: + value["ctrlName"] = new_ctrlName + + schemaBacnetServer = self.schema.simple_config["device_supervisor"]["bacnetServer"]["mapping_table"] + for key, value in list(schemaBacnetServer.items()): + if value["ctrlName"] == old_ctrlName and old_ctrlName != new_ctrlName: + value["ctrlName"] = new_ctrlName + + schemaDnp3Server = self.schema.simple_config["device_supervisor"]["Dnp3Server"]["mapping_table"] + for key, value in list(schemaDnp3Server.items()): + if value["ctrlName"] == old_ctrlName and old_ctrlName != new_ctrlName: + value["ctrlName"] = new_ctrlName + + schemaiec61850Server = self.schema.simple_config["device_supervisor"]["iec61850Server"]["mapping_table"] + for key, value in list(schemaiec61850Server.items()): + if value["ctrlName"] == old_ctrlName and old_ctrlName != new_ctrlName: + value["ctrlName"] = new_ctrlName + LnInst = value["daRef"].split(".", 1)[0] + other = value["daRef"].split(".", 1)[1] + LdName = LnInst.split("/", 1)[0] + value["daRef"] = LdName + "/" + value["ctrlName"] + "." 
def modify_measure_params_sync_config(self, old_meaName, mea):
    """Propagate an edit of one measuring point (rename, dataType,
    readWrite, STRING length or enableBit change) into every dependent
    configuration section.

    Sections synchronised: alarms, modbusSlave / modbusRTUSlave mapping
    tables, iec104Server, iec101Server (YX/YC/YK), opcuaServer, sl651Slave,
    hj212Client, bacnetServer, Dnp3Server, iec61850Server, snmpAgent,
    iec104Client (YX/YC/YK), cloud uploadRules and mindsphere uploadRules.

    Matching entries are renamed in place; entries that became incompatible
    (different dataType / readWrite, changed STRING length, or changed
    enableBit on a numeric type) are dropped.

    :param old_meaName: measure name before the edit.
    :param mea: edited measure dict (at least "ctrlName", "name",
        "dataType", "readWrite"; "len" / "enableBit" where applicable).
    """
    ds = self.schema.simple_config["device_supervisor"]

    # Alarms: rename, and drop alarms on points that became STRING.
    schemaAlarm = ds["alarms"]
    for key, value in list(schemaAlarm.items()):
        if value["ctrlName"] == mea["ctrlName"] and value["measureName"] == old_meaName:
            if mea["name"] != old_meaName:
                value["measureName"] = mea["name"]
            if mea["dataType"] == "STRING":
                schemaAlarm.pop(key, None)
                self.clear_invaild_config_by_alarm(value)

    # Modbus slave tables are nested one level deeper (one table per slave).
    for mappingTableValue in ds["modbusSlave"]["mappingTable"].values():
        self._sync_measure_mapping_table(mappingTableValue["measures"], old_meaName, mea, check_len=True)
    for mappingTableValue in ds["modbusRTUSlave"]["mappingTable"].values():
        self._sync_measure_mapping_table(mappingTableValue["measures"], old_meaName, mea, check_len=True)

    # Flat tables without the STRING-length constraint.
    self._sync_measure_mapping_table(ds["iec104Server"]["mapping_table"], old_meaName, mea, check_len=False)
    for area in ("YX", "YC", "YK"):
        self._sync_measure_mapping_table(ds["iec101Server"]["mappingTable"][area], old_meaName, mea, check_len=False)
        self._sync_measure_mapping_table(ds["iec104Client"]["mapping_table"][area], old_meaName, mea, check_len=False)

    # opcuaServer: entries are only touched when the dataType is unchanged;
    # on rename the nodeId ("ns=..;s=<ctrl>.<measure>") is rebuilt as well.
    schemaOpcuaServer = ds["opcuaServer"]["mapping_table"]
    for key, value in list(schemaOpcuaServer.items()):
        if (value["ctrlName"] == mea["ctrlName"] and value["measureName"] == old_meaName
                and mea["dataType"] == value["dataType"]):
            if mea["readWrite"] != value["readWrite"]:
                schemaOpcuaServer.pop(key, None)
            self._drop_if_enable_bit_changed(schemaOpcuaServer, key, old_meaName, mea)
            if mea["name"] != old_meaName:
                value["measureName"] = mea["name"]
                if ds["opcuaServer"]["config"]["identifierType"] == "String":
                    ns = value["nodeId"].split(";")[0]
                    s = value["nodeId"].split(";")[1]
                    ctrl_part = s.split("=")[1].split(".")[0]
                    value["nodeId"] = ns + ";s=" + ctrl_part + "." + value["measureName"]

    # Flat tables with the STRING-length constraint.
    for section in ("sl651Slave", "hj212Client", "bacnetServer", "Dnp3Server", "snmpAgent"):
        self._sync_measure_mapping_table(ds[section]["mapping_table"], old_meaName, mea, check_len=True)

    # iec61850Server: standard sync, plus daRef ("<ld>.<measure>.<daAttr>")
    # embeds the measure name and must be rebuilt on rename.
    schemaIec61850 = ds["iec61850Server"]["mapping_table"]
    self._sync_measure_mapping_table(schemaIec61850, old_meaName, mea, check_len=True)
    if mea["name"] != old_meaName:
        # NOTE(review): nesting reconstructed from decompiled output — the
        # original applied this inside the same loop after the rename; confirm.
        for value in schemaIec61850.values():
            if value["ctrlName"] == mea["ctrlName"] and value["measureName"] == mea["name"]:
                parts = value["daRef"].split(".", 2)
                value["daRef"] = parts[0] + "." + mea["name"] + "." + parts[2]

    # Plain renames in upload rules (clouds and mindsphere).
    for cloud in ds["clouds"].values():
        for rule in cloud.get("uploadRules", []):
            if rule["ctrlName"] == mea["ctrlName"] and rule["measureName"] == old_meaName and mea["name"] != old_meaName:
                rule["measureName"] = mea["name"]
    for mindsphere in ds["mindspheres"].values():
        for mindsphereput in mindsphere["mindsphereputs"]:
            for rule in mindsphereput.get("uploadRules", []):
                if rule["ctrlName"] == mea["ctrlName"] and rule["measureName"] == old_meaName and mea["name"] != old_meaName:
                    rule["measureName"] = mea["name"]

def _sync_measure_mapping_table(self, table, old_meaName, mea, check_len=False):
    """Rename/drop the entries of one protocol mapping table for the edited
    measure *mea* (previously named *old_meaName*).

    Drops an entry when dataType or readWrite changed, when a STRING
    point's length changed (check_len=True tables only), or when a numeric
    point's enableBit changed. Uses pop(key, None): the original duplicated
    code called dict.pop(key) repeatedly and raised KeyError when several
    drop conditions held for the same entry.
    """
    for key, value in list(table.items()):
        if value["ctrlName"] != mea["ctrlName"] or value["measureName"] != old_meaName:
            continue
        if mea["name"] != old_meaName:
            value["measureName"] = mea["name"]
        if mea["dataType"] != value["dataType"] or mea["readWrite"] != value["readWrite"]:
            table.pop(key, None)
        if check_len:
            schemaMeasure = self.schema.simple_config["device_supervisor"]["measures"][mea["ctrlName"]]
            for old_mea in list(schemaMeasure.values()):
                if (old_mea["name"] == old_meaName and old_mea["dataType"] == mea["dataType"]
                        and mea["dataType"] == "STRING" and mea["len"] != old_mea["len"]):
                    table.pop(key, None)
        self._drop_if_enable_bit_changed(table, key, old_meaName, mea)

def _drop_if_enable_bit_changed(self, table, key, old_meaName, mea):
    """Drop table[key] when *mea* is a bit-addressable numeric type whose
    enableBit flag differs from the stored measure of the same name."""
    if mea["dataType"] not in [Byte, Sint, Word, Int, Dword, Dint, Bcd, Bcd32, Ulong, Long]:
        return
    schemaMeasure = self.schema.simple_config["device_supervisor"]["measures"][mea["ctrlName"]]
    for old_mea in list(schemaMeasure.values()):
        if old_meaName == old_mea["name"] and mea["enableBit"] != old_mea["enableBit"]:
            table.pop(key, None)
def clear_invaild_config_by_alarm(self, ala):
    """Remove every bindCfg alert entry that refers to the deleted alarm.

    Fix: iterate over a snapshot — the original removed items from the very
    list it was iterating, which skips the element following each removal
    (duplicate alert entries for the same alarm survived).

    :param ala: the deleted alarm dict; matched by its "name".
    """
    alerts = self.ucfg.bindConfig["alerts"]
    for alert in list(alerts):
        if ala["name"] == alert["alarmName"]:
            alerts.remove(alert)

def delete_uploadFuncs_alarm_list(self, upload_alarms, del_alarms):
    """Remove every name in *del_alarms* from *upload_alarms* (in place).

    :returns: the (possibly emptied) *upload_alarms* list.
    """
    for ala in del_alarms:
        if ala in upload_alarms:
            upload_alarms.remove(ala)
    return upload_alarms

def clear_invaild_config_by_alarmLable(self, del_alalab):
    """Strip deleted alarm labels from every quickfaas "warning_event"
    upload function; drop a function whose alarm list becomes empty.

    (The original expressed the drop as ``delete(...) or remove(item)``;
    written as an explicit emptiness test here — same behaviour.)
    """
    uploadFuncs = self.schema.simple_config["device_supervisor"]["quickfaas"]["uploadFuncs"]
    for cloud_uploadFuncs in uploadFuncs.values():
        for item in list(cloud_uploadFuncs):
            if item["trigger"] == "warning_event":
                remaining = self.delete_uploadFuncs_alarm_list(item["alarms"], del_alalab)
                if not remaining:
                    cloud_uploadFuncs.remove(item)

def delete_and_change_hj212_block_to_syn_config(self, hj212):
    """Synchronise hj212Client mapping_table entries after block-list edits.

    A deleted block (edited value is None) drops its mapping-table rows;
    a renamed block rewrites the "block" field of its rows.

    :param hj212: the edited hj212 section; ignored when it carries no
        "config" key.
    """
    if "config" not in hj212:
        return
    schemaiechj212Client = self.schema.simple_config["device_supervisor"]["hj212Client"]
    for block_id, edited in hj212["config"]["block_list"].items():
        if block_id not in schemaiechj212Client["config"]["block_list"]:
            continue
        old_name = schemaiechj212Client["config"]["block_list"][block_id]["name"]
        if edited is None:
            for key, table_value in list(schemaiechj212Client["mapping_table"].items()):
                if old_name == table_value["block"]:
                    schemaiechj212Client["mapping_table"].pop(key)
        elif edited["name"] != old_name:
            for table_value in schemaiechj212Client["mapping_table"].values():
                if old_name == table_value["block"]:
                    table_value["block"] = edited["name"]
def delete_104_asdu_to_syn_config(self, iec104):
    """Drop iec104Server mapping-table rows whose ASDU address no longer
    exists in the edited server list.

    :param iec104: edited iec104 section; ignored without a "config" key.
    """
    if "config" not in iec104:
        return
    # Set membership instead of a list: same result, O(1) lookups.
    keep = {asdu["asduAddr"] for asdu in iec104["config"]["serverList"]}
    table = self.schema.simple_config["device_supervisor"]["iec104Server"]["mapping_table"]
    for key, value in list(table.items()):
        if value["asduAddr"] not in keep:
            table.pop(key)

def delete_101_asdu_to_syn_config(self, iec101):
    """Drop iec101Server YC/YX/YK rows whose ASDU address no longer exists
    in the edited server list.

    :param iec101: edited iec101 section; ignored without a "config" key.
    """
    if "config" not in iec101:
        return
    keep = {asdu["asduAddr"] for asdu in iec101["config"]["serverList"]}
    mapping = self.schema.simple_config["device_supervisor"]["iec101Server"]["mappingTable"]
    for area in ("YC", "YX", "YK"):
        table = mapping[area]
        for key, value in list(table.items()):
            if value["asduAddr"] not in keep:
                table.pop(key)

def _clear_quickfaas_func(self, cloud_name):
    """Empty the quickfaas upload/download function lists of *cloud_name*
    (no-op when the cloud has no entry)."""
    quickfaas = self.schema.simple_config["device_supervisor"]["quickfaas"]
    for funcs in (quickfaas["uploadFuncs"], quickfaas["downloadFuncs"]):
        if cloud_name in funcs:
            funcs[cloud_name].clear()

def labels_to_syn_config(self, labels, labels_new):
    """Preserve the existing MAC/SN labels across a label-list edit by
    copying them back into *labels_new*.

    NOTE(review): the decompiled original read
        if not id in simple_labels or simple_labels[id]["key"] == "MAC" ...
    which dereferences simple_labels[id] (KeyError) whenever *id* is absent;
    the membership test is guarded here. Confirm intent against the
    original source.
    """
    simple_labels = self.schema.simple_config["device_supervisor"]["labels"]
    for label_id in labels_new.keys():
        if label_id in simple_labels and simple_labels[label_id]["key"] in ("MAC", "SN"):
            labels_new[label_id] = simple_labels[label_id]
def switch_cloud_to_syn_config(self, clouds, clouds_new):
    """Apply cloud add/remove/switch edits: tear down connections and
    quickfaas functions of removed or type-switched clouds, reconnect
    enabled Erlang/BaiYing clouds.

    NOTE(review): control-flow nesting reconstructed from decompiled
    bytecode — verify the type-switch / disable branches against the
    original source.

    :param clouds: edited cloud entries keyed by id (None = deleted).
    :param clouds_new: the new cloud definitions keyed by id.
    """
    simple_clouds = self.schema.simple_config["device_supervisor"]["clouds"]
    for cloud_id, cloud in clouds.items():
        if cloud is None:
            # Cloud deleted: disconnect it / clear its quickfaas functions.
            if cloud_id in simple_clouds:
                simple_cloud = simple_clouds[cloud_id]
                if simple_cloud["type"] in [CloudTypeErlang, CloudTypeBaiYing]:
                    self.erlang.clear_bind_info()
                    self.erlang.disconnect_erlang(simple_cloud)
                else:
                    self._clear_quickfaas_func(simple_cloud["name"])
        elif cloud["enable"]:
            if cloud_id in clouds_new:
                self.get_rert_content(clouds_new[cloud_id])
            if cloud["type"] in [CloudTypeErlang, CloudTypeBaiYing]:
                self.erlang.reconnect_erlang(cloud)
        elif cloud_id in simple_clouds:
            simple_cloud = simple_clouds[cloud_id]
            if cloud["type"] != simple_cloud["type"]:
                # Cloud switched to a different type: tear down the old side.
                if simple_cloud["type"] in [CloudTypeErlang, CloudTypeBaiYing]:
                    self.erlang.clear_bind_info()
                    if cloud["type"] not in [CloudTypeErlang, CloudTypeBaiYing]:
                        self.erlang.disconnect_erlang(simple_cloud)
                else:
                    self._clear_quickfaas_func(simple_cloud["name"])
            elif 0 == cloud["enable"]:
                # Same type, now disabled.
                if simple_cloud["type"] in [CloudTypeErlang, CloudTypeBaiYing]:
                    self.erlang.clear_bind_info()
                    self.erlang.disconnect_erlang(simple_cloud)
                else:
                    self.schema.simple_config["device_supervisor"]["quickfaas"]["uploadFuncs"][cloud["name"]] = list()
                    self.schema.simple_config["device_supervisor"]["quickfaas"]["downloadFuncs"][cloud["name"]] = list()

def get_rert_content(self, *args, **kwargs):
    """Placeholder — decompilation of this method FAILED.

    The decompiler emitted "Parse error at or near `COME_FROM' instruction
    at offset 980_0" instead of a body, so the original logic is unknown.
    TODO: restore this method from the original source or the .pyc bytecode.
    """
    raise NotImplementedError("get_rert_content: decompilation failed; restore from original source")

def on_import_file(self, *args, **kwargs):
    """Placeholder — decompilation of this method FAILED.

    The decompiler emitted "Parse error at or near `JUMP_FORWARD'
    instruction at offset 1024" instead of a body.
    TODO: restore this method from the original source or the .pyc bytecode.
    """
    raise NotImplementedError("on_import_file: decompilation failed; restore from original source")

def import_measure_csv(self, ctr_name, filename, topic):
    """Import measuring points for controller *ctr_name* from a CSV file,
    replacing its configured measures and dropping dependent config of
    measures that disappeared.

    :param ctr_name: controller name the measures belong to.
    :param filename: CSV path; tried as utf-8-sig first, then gbk.
    :param topic: response topic passed through to build_response_data.
    """
    schema_cons = self.schema.simple_config["device_supervisor"]["controllers"]
    controllers = list(schema_cons.values())
    ind = self.verify_kv_and_get_index(controllers, "name", ctr_name)
    if ind is None:
        logger.warn("The controller(%s) does not exist" % ctr_name)
        data = dict(zip(self.ERROR_KEYS, self.ERROR_VALUES_DICT[-11014]))
        return self.build_response_data(data, topic)
    try:
        with open(filename, "r", encoding="utf-8-sig") as f:
            csv_reader = list(csv.DictReader(f, delimiter=","))
    except Exception:
        # Fall back to the legacy Chinese Windows encoding.
        with open(filename, "r", encoding="gbk") as f:
            csv_reader = list(csv.DictReader(f, delimiter=","))

    count_line = 1  # header row is line 1; used for error reporting
    measure_news = list()
    self.names = list()
    for val in csv_reader:
        count_line += 1
        error_val = self.verify_measure_post(ctr_name, val)
        if error_val != NONE:
            logger.warn("import measure csv error: invalid %s" % error_val)
            return self.response_error_params(count_line, error_val, topic, -11008)
        measure_news.append(self.transfer_measure_csv_headers(ctr_name, val))

    if not measure_news:
        logger.warn("import measure csv error: Unuseful data")
        data = dict(zip(self.ERROR_KEYS, self.ERROR_VALUES_DICT[-11025]))
        return self.build_response_data(data, topic)

    # Drop dependent configuration of measures that no longer exist.
    measure_olds = list()
    if ctr_name in self.schema.simple_config["device_supervisor"]["measures"]:
        measure_olds = list(self.schema.simple_config["device_supervisor"]["measures"][ctr_name].values())
    for mea in [x for x in measure_olds if x not in measure_news]:
        self.clear_invaild_config_by_measure(mea)

    simple_config = dict()
    if ctr_name in self.schema.simple_config["device_supervisor"]["measures"]:
        self.schema.simple_config["device_supervisor"]["measures"][ctr_name].clear()
    simple_config_map_array(measure_news, simple_config, ctr_name, self.schema.simple_config_indexs, "name")
    self.schema.simple_config["device_supervisor"]["measures"][ctr_name] = simple_config[ctr_name]
    self.on_cfg_editing()
    data = self.wrap_response_data("ok")
    self.build_response_data(data, topic)
def import_group_csv(self, filename, topic):
    """Import polling groups from a CSV file, validating each row, and
    replace the configured groups (dropping config of removed groups).

    NOTE(review): recovered from decompiled bytecode with partially garbled
    control flow; the branches marked below were reconstructed and should
    be checked against the original source.

    :param filename: CSV path; tried as utf-8-sig first, then gbk.
    :param topic: response topic passed through to build_response_data.
    """
    count_line = 1  # header row is line 1; used for error reporting
    names = list()
    group_news = list()
    try:
        with open(filename, "r", encoding="utf-8-sig") as f:
            csv_reader = list(csv.DictReader(f, delimiter=","))
    except Exception:
        # Fall back to the legacy Chinese Windows encoding.
        with open(filename, "r", encoding="gbk") as f:
            csv_reader = list(csv.DictReader(f, delimiter=","))

    for g in csv_reader:
        val = dict()
        count_line += 1
        # NOTE(review): decompiled output put the name/uploadInterval
        # assignments in an else-branch of the "_Id" check, leaving
        # val["name"] undefined for rows carrying _Id even though it is
        # read unconditionally below; restored as unconditional.
        if "_Id" in g:
            val["_id"] = g["_Id"]
        val["name"] = g["GroupName"]
        val["uploadInterval"] = int(g["UploadInterval"])
        if not val["name"] or "/" in val["name"] or "." in val["name"]:
            logger.warn("import group csv error: invalid GroupName: %s" % val["name"])
            return self.response_error_params(count_line, "GroupName have '/' or '.': " + val["name"], topic, -11008)
        if len(str(val["name"])) > 256:
            logger.warn("import group csv error: invalid GroupName: %s" % val["name"])
            return self.response_error_params(count_line, "GroupName len > 256: " + val["name"], topic, -11008)
        sp = val["uploadInterval"]
        # NOTE(review): decompiled form only rejected sp > 3600 and let
        # sp < 1 through silently; full range check restored to match the
        # storagePeriod validation below.
        if sp < 1 or sp > 3600:
            logger.warn("import group csv error: invalid uploadInterval: %s" % str(sp))
            return self.response_error_params(count_line, "uploadInterval: " + str(sp), topic, -11008)
        val["enablePerOnchange"] = int(g["EnablePerOnchange"]) if "EnablePerOnchange" in g else 0
        if val["enablePerOnchange"] not in (0, 1):
            logger.warn("import group csv error: invalid enablePerOnchange: %s" % str(val["enablePerOnchange"]))
            return self.response_error_params(count_line, "enablePerOnchange: " + str(val["enablePerOnchange"]), topic, -11008)
        if val["enablePerOnchange"] == 1:
            val["onchangePeriod"] = int(g["OnchangePeriod"])
            if val["onchangePeriod"] < 1 or val["onchangePeriod"] > 86400:
                logger.warn("import group csv error: invalid OnchangePeriod: %s" % str(val["onchangePeriod"]))
                return self.response_error_params(count_line, "OnchangePeriod: " + str(val["onchangePeriod"]), topic, -11008)
        # NOTE(review): original assigned the 150000 default and then
        # unconditionally read g["LwTSDBSize"] (KeyError when the column is
        # absent); restored as a proper default.
        val["LwTSDBSize"] = int(g["LwTSDBSize"]) if "LwTSDBSize" in g else 150000
        if val["LwTSDBSize"] < 1 or val["LwTSDBSize"] > 150000:
            logger.warn("import group csv error: invalid LwTSDBSize: %s" % str(val["LwTSDBSize"]))
            return self.response_error_params(count_line, "LwTSDBSize: " + str(val["LwTSDBSize"]), topic, -11008)
        val["strategy"] = int(g["strategy"]) if "strategy" in g else 1
        if val["strategy"] not in (1, 2):
            logger.warn("import group csv error: invalid strategy: %s" % str(val["strategy"]))
            return self.response_error_params(count_line, "strategy: " + str(val["strategy"]), topic, -11008)
        if val["strategy"] == 2:
            if "storagePeriod" in g:
                val["storagePeriod"] = int(g["storagePeriod"])
                if val["storagePeriod"] < 1 or val["storagePeriod"] > 86400:
                    logger.warn("import group csv error: invalid storagePeriod: %s" % str(val["storagePeriod"]))
                    return self.response_error_params(count_line, "storagePeriod: " + str(val["storagePeriod"]), topic, -11008)
            else:
                logger.warn("import group csv error: strategy is 2 but not storagePeriod")
                return self.response_error_params(count_line, "strategy is 2 but not storagePeriod", topic, -11008)
        # NOTE(review): decompiled output chained this block to the strategy
        # branch via elif, leaving historyDataMode unset for strategy==2 rows
        # even though it is validated unconditionally below; treated as an
        # independent block here.
        if "historyDataMode" not in g or "historyDataPath" not in g:
            if "historyDataPath" in g and "/mnt/usb/" in g["historyDataPath"]:
                val["historyDataMode"] = "usb"
            elif "historyDataPath" in g and "/mnt/sd/" in g["historyDataPath"]:
                val["historyDataMode"] = "sd"
            else:
                val["historyDataMode"] = "gateway"
                val["historyDataPath"] = "/var/user/data/dbhome/device_supervisor/LwTSDB"
        else:
            val["historyDataMode"] = g["historyDataMode"]
            val["historyDataPath"] = g["historyDataPath"]
        if val["historyDataMode"] not in ('usb', 'sd', 'ssd', 'gateway'):
            logger.warn("import group csv error: invalid historyDataMode: %s" % val["historyDataMode"])
            return self.response_error_params(count_line, "historyDataMode: " + val["historyDataMode"], topic, -11008)
        name = val["name"]
        if not name or name in names:
            logger.warn("import group csv error: invalid name: %s" % name)
            return self.response_error_params(count_line, "name: " + val["name"], topic, -11008)
        names.append(name)
        group_news.append(val)

    if len(group_news) == 0:
        logger.warn("import group csv error: Unuseful data")
        data = dict(zip(self.ERROR_KEYS, self.ERROR_VALUES_DICT[-11025]))
        return self.build_response_data(data, topic)

    # Groups that disappeared from the import are deleted (value None).
    gr = dict()
    schema_grs = self.schema.simple_config["device_supervisor"]["groups"]
    for gr_id, group in schema_grs.items():
        if group["name"] not in names:
            gr.update({gr_id: None})
    self.delete_groups_to_syn_config(gr)

    simple_config = dict()
    self.schema.simple_config["device_supervisor"]["groups"].clear()
    simple_config_map_array(group_news, simple_config, "groups", self.schema.simple_config_indexs, "name")
    self.schema.simple_config["device_supervisor"]["groups"] = simple_config["groups"]
    self.on_cfg_editing()
    data = self.wrap_response_data("ok")
    self.build_response_data(data, topic)
self.ERROR_VALUES_DICT[-11025])) + return self.build_response_data(data, topic) + gr = dict() + schema_grs = self.schema.simple_config["device_supervisor"]["groups"] + for gr_id, group in schema_grs.items(): + if group["name"] not in names: + gr.update({gr_id: None}) + + self.delete_groups_to_syn_config(gr) + simple_config = dict() + self.schema.simple_config["device_supervisor"]["groups"].clear() + simple_config_map_array(group_news, simple_config, "groups", self.schema.simple_config_indexs, "name") + self.schema.simple_config["device_supervisor"]["groups"] = simple_config["groups"] + self.on_cfg_editing() + data = self.wrap_response_data("ok") + self.build_response_data(data, topic) + + def import_opcua_pubsub_group_csv(self, filename, topic): + count_line = 1 + group_news = list() + try: + with open(filename, "r", encoding="utf-8-sig") as f: + csv_reader = list(csv.DictReader(f, delimiter=",")) + except Exception: + with open(filename, "r", encoding="gbk") as f: + csv_reader = list(csv.DictReader(f, delimiter=",")) + + for g in csv_reader: + val = dict() + val["group_type"] = g["GroupType"] + val["group_id"] = int(g["GroupId"]) + count_line += 1 + if not val["group_type"] or val["group_type"] not in ('pub', 'sub'): + logger.warn("import opcua_pubsub_group csv error: invalid group_type: %s" % val["group_type"]) + return self.response_error_params(count_line, "group_type: " + val["group_type"], topic, -11008) + sp = val["group_id"] + if not sp < 0: + if sp > 65535: + logger.warn("import opcua_pubsub_group csv error: invalid group_id: %s" % str(sp)) + return self.response_error_params(count_line, "group_id: " + str(sp), topic, -11008) + grp = {'group_type':val["group_type"], + 'group_id':val["group_id"]} + if not grp or grp in group_news: + logger.warn("import opcua_pubsub_group csv error: invalid name: %s %s" % ( + val["group_type"], str(val["group_id"]))) + return self.response_error_params(count_line, "name: " + val["group_type"] + str(val["group_id"]), topic, 
def import_alarm_csv(self, filename, topic):
    # Import alarm definitions from a CSV export and replace the whole
    # device_supervisor.alarms section.  Publishes on *topic*: -11008 for a
    # bad row, -11025 when no usable rows exist, "ok" on success.
    schema_alas = self.schema.simple_config["device_supervisor"]["alarms"]
    alarms_olds = list(schema_alas.values())
    count_line = 1  # header row; bumped before each data row for error reports
    names = list()
    alarm_news = list()
    # The CSV may be UTF-8 (with BOM) or GBK encoded; try UTF-8 first.
    try:
        with open(filename, "r", encoding="utf-8-sig") as f:
            csv_reader = list(csv.DictReader(f, delimiter=","))
    except Exception:
        with open(filename, "r", encoding="gbk") as f:
            csv_reader = list(csv.DictReader(f, delimiter=","))

    for alm in csv_reader:
        val = dict()
        count_line += 1
        if len(str(alm["AlarmName"])) > 256:
            logger.warn("import alarm csv error: invalid AlarmName: %s" % alm["AlarmName"])
            return self.response_error_params(count_line, "AlarmName > 256: " + alm["AlarmName"], topic, -11008)
        # NOTE(review): as written this only rejects '/' when the name has no
        # '.'; presumably both characters were meant to be forbidden — confirm.
        if not "." in alm["ControllerName"]:
            if "/" in alm["ControllerName"]:
                logger.warn("import alarm csv error: invalid ControllerName: %s" % alm["ControllerName"])
                return self.response_error_params(count_line, "ControllerName have '.' or '/': " + alm["ControllerName"], topic, -11008)
        # The referenced controller and measuring point must already exist.
        ctr = self.find_ctl_name(alm["ControllerName"])
        if not ctr:
            logger.warn("import alarm csv error: invalid ControllerName: %s" % alm["ControllerName"])
            return self.response_error_params(count_line, "ControllerName: " + alm["ControllerName"], topic, -11008)
        mea = self.find_measure_by_name(alm["ControllerName"], alm["MeasuringPointName"])
        if not mea:
            logger.warn("import alarm csv error: invalid MeasuringPointName: %s" % alm["MeasuringPointName"])
            return self.response_error_params(count_line, "MeasuringPointName: " + alm["MeasuringPointName"], topic, -11008)
        if "_Id" in alm:
            val["_id"] = alm["_Id"]
    
        val["name"] = alm["AlarmName"]
        val["ctrlName"] = alm["ControllerName"]
        val["measureName"] = alm["MeasuringPointName"]
        val["alarmLevel"] = int(alm["AlarmLevel"])
        # Two comparison conditions joined by CombineMethod (condOp).
        val["cond1"] = dict()
        val["cond1"]["op"] = alm["Condition1"]
        val["cond1"]["value"] = alm["Operand1"]
        val["condOp"] = alm["CombineMethod"]
        val["cond2"] = dict()
        val["cond2"]["op"] = alm["Condition2"]
        val["cond2"]["value"] = alm["Operand2"]
        val["content"] = alm["AlarmContent"]
        val["alarmLable"] = alm["AlarmTag"]
        name = val["name"]
        measure = val["measureName"]
        ind = self.verify_kv_and_get_index(self.ucfg.measures, "name", measure)
        if ind is None:
            logger.warn("import alarm csv error: invalid MeasuringPointName: %s" % val["measureName"])
            return self.response_error_params(count_line, "MeasuringPointName: " + val["measureName"], topic, -11008)
        # Alarm names must be non-empty and unique within the import.
        if not name or name in names:
            logger.warn("import alarm csv error: invalid AlarmName: %s" % name)
            return self.response_error_params(count_line, "AlarmName: " + name, topic, -11008)
        names.append(name)
        # Field-level validation.  NOTE(review): NONE is presumably a
        # module-level "no error" sentinel (not the builtin None) — confirm.
        error_val = self.verify_alarm_post(val)
        if error_val != NONE:
            logger.warn("import alarm csv error: invalid %s" % error_val)
            return self.response_error_params(count_line, error_val, topic, -11008)
        alarm_news.append(val)

    if len(alarm_news) == 0:
        logger.warn("import alarm csv error: Unuseful data")
        data = dict(zip(self.ERROR_KEYS, self.ERROR_VALUES_DICT[-11025]))
        return self.build_response_data(data, topic)
    # Clean up config referencing alarms that disappear with this import.
    alarms_dels = [x for x in alarms_olds if x not in alarm_news]
    for ala in alarms_dels:
        self.clear_invaild_config_by_alarm(ala)

    # Rebuild the alarms section from the imported rows.
    simple_config = dict()
    self.schema.simple_config["device_supervisor"]["alarms"].clear()
    simple_config_map_array(alarm_news, simple_config, "alarms", self.schema.simple_config_indexs, "name")
    self.schema.simple_config["device_supervisor"]["alarms"] = simple_config["alarms"]
    self.on_cfg_editing()
    data = self.wrap_response_data("ok")
    self.build_response_data(data, topic)
def import_alarmLables_csv(self, filename, topic):
    """Import alarm label names from a CSV file (column 'AlarmLableName').

    Rejects empty, duplicate, or over-long (>256 chars) names with error
    -11008, and an import that yields no labels with -11025.  On success the
    device_supervisor.alarmLables list is replaced wholesale and an "ok"
    response is published on *topic*.
    """
    imported = []
    with open(filename, "r", encoding="utf-8-sig") as fh:
        # Row numbering starts at 2 because row 1 is the CSV header.
        for row_no, record in enumerate(csv.DictReader(fh, delimiter=","), start=2):
            label = record["AlarmLableName"]
            if len(str(label)) > 256:
                logger.warn("import alarmLables csv error: invalid AlarmLableName: %s" % label)
                return self.response_error_params(row_no, "AlarmLableName > 256: " + label, topic, -11008)
            if not label or label in imported:
                logger.warn("import alarmLables csv error: invalid AlarmLableName: %s" % label)
                return self.response_error_params(row_no, "AlarmLableName: " + label, topic, -11008)
            imported.append(label)

    if not imported:
        logger.warn("import alarmLables csv error: Unuseful data")
        data = dict(zip(self.ERROR_KEYS, self.ERROR_VALUES_DICT[-11025]))
        return self.build_response_data(data, topic)

    # Labels that exist both before and after the import are handed to the
    # synchronized-config cleanup helper.
    existing = self.schema.simple_config["device_supervisor"]["alarmLables"]
    retained = [old for old in existing if old in imported]
    self.delete_alarmLables_to_syn_config(retained)

    existing.clear()
    self.schema.simple_config["device_supervisor"]["alarmLables"] = imported
    self.on_cfg_editing()
    self.build_response_data(self.wrap_response_data("ok"), topic)
def import_cloud_csv(self, filename, topic, cld_name):
    # Import one cloud connection (plus its quickfaas upload/download
    # function lists) from a JSON export file and install it as *cld_name*.
    pubs = list()
    subs = list()
    import_cloud = dict()
    with open(filename, "r", encoding="utf-8-sig") as f:
        info = json.load(f)
    # Cloud names may not contain '.' or '/'.
    if "." in cld_name or "/" in cld_name:
        data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11019]))
        data["params"] = "Cloud Name: " + cld_name
        logger.warn("import cloud csv error: invalid %s" % data["params"])
        return self.build_response_data(data, topic)
    import_cloud = info["cloud"]
    if "name" not in import_cloud:
        import_cloud["name"] = cld_name
    self._verify_cld_put(import_cloud)
    self.update_cert_by_content(import_cloud)
    # Erlang/BaiYing cloud types carry no quickfaas functions; for every
    # other type validate the upload/download function lists from the file.
    if import_cloud["type"] not in [CloudTypeErlang, CloudTypeBaiYing]:
        pubs = info["quickfaas"]["uploadFuncs"]
        for pub in pubs:
            if "cloudName" not in pub:
                pub["cloudName"] = cld_name
            if "hideOfflineData" not in pub:
                pub["hideOfflineData"] = 0
            res_data = self.verify_pub_post(import_cloud["type"], pub)
            if res_data and "error" in res_data:
                logger.warn("import cloud csv error: invalid %s" % res_data["params"])
                return self.build_response_data(res_data, topic)

        subs = info["quickfaas"]["downloadFuncs"]
        for sub in subs:
            if "cloudName" not in sub:
                sub["cloudName"] = cld_name
            res_data = self.verify_sub_post(import_cloud["type"], sub)
            if res_data and "error" in res_data:
                logger.warn("import cloud csv error: invalid %s" % res_data["params"])
                return self.build_response_data(res_data, topic)

    # Replace the existing cloud entry with the imported one and notify the
    # erlang agent about the connection change.
    for id, cloud in self.schema.simple_config["device_supervisor"]["clouds"].items():
        if cloud["name"] == cld_name:
            import_cloud["_id"] = id
            self.schema.simple_config["device_supervisor"]["clouds"].update({id: import_cloud})
            # NOTE(review): the attachment of this elif-chain was ambiguous
            # in the decompiled source.  Reading chosen: non-erlang import
            # replacing an erlang-managed cloud -> deregister (flag 0);
            # erlang-type import -> connect (flag 1), enabled or not — confirm
            # against send_connect_info_to_erlang_agent's contract.
            if import_cloud["type"] not in [CloudTypeErlang, CloudTypeBaiYing]:
                if cloud["type"] in [CloudTypeErlang, CloudTypeBaiYing]:
                    self.erlang.send_connect_info_to_erlang_agent(import_cloud, None, 0)
            elif import_cloud["enable"] == 1:
                self.erlang.send_connect_info_to_erlang_agent(import_cloud, None, 1)
            else:
                self.erlang.send_connect_info_to_erlang_agent(import_cloud, False, 1)
            break

    # Store the (possibly empty) quickfaas function lists under the cloud name.
    self.schema.simple_config["device_supervisor"]["quickfaas"]["uploadFuncs"].update({cld_name: pubs})
    self.schema.simple_config["device_supervisor"]["quickfaas"]["downloadFuncs"].update({cld_name: subs})
    self.on_cfg_editing()
    data = self.wrap_response_data("ok")
    self.build_response_data(data, topic)
def import_cloud_measure_csv(self, filename, topic, cld_name):
    # Import the upload-rule list (measuring point -> upload name mapping)
    # for the cloud named *cld_name*, replacing its "uploadRules" wholesale.
    clouds = list(self.schema.simple_config["device_supervisor"]["clouds"].values())
    ind = self.verify_kv_and_get_index(clouds, "name", cld_name)
    if ind is None:
        logger.warn("The clould(%s) does not exist" % cld_name)
        data = dict(zip(self.ERROR_KEYS, self.ERROR_VALUES_DICT[-11014]))
        return self.build_response_data(data, topic)
    # The CSV may be UTF-8 (with BOM) or GBK encoded; try UTF-8 first.
    try:
        with open(filename, "r", encoding="utf-8-sig") as f:
            csv_reader = list(csv.DictReader(f, delimiter=","))
    except Exception:
        with open(filename, "r", encoding="gbk") as f:
            csv_reader = list(csv.DictReader(f, delimiter=","))

    count_line = 1  # header row; bumped before each data row for error reports
    rule_news = list()
    uploadNames = list()
    for val in csv_reader:
        count_line += 1
        rule_new = dict()
        # NOTE(review): this only rejects '/' when the name contains no '.';
        # presumably both characters were meant to be forbidden — confirm.
        if not "." in val["ControllerName"]:
            if "/" in val["ControllerName"]:
                logger.warn("import cloud measure csv error: invalid ControllerName: %s" % val["ControllerName"])
                return self.response_error_params(count_line, "ControllerName have '.' or '/': " + val["ControllerName"], topic, -11008)
        # The referenced controller and measuring point must already exist.
        ctr = self.find_ctl_name(val["ControllerName"])
        if not ctr:
            logger.warn("import cloud measure csv error: invalid ControllerName: %s" % val["ControllerName"])
            return self.response_error_params(count_line, "ControllerName: " + val["ControllerName"], topic, -11008)
        mea = self.find_measure_by_name(val["ControllerName"], val["MeasuringPointName"])
        if not mea:
            logger.warn("import cloud measure csv error: invalid MeasuringPointName: %s" % val["MeasuringPointName"])
            return self.response_error_params(count_line, "MeasuringPointName: " + val["MeasuringPointName"], topic, -11008)
        rule_new["ctrlName"] = val["ControllerName"]
        rule_new["measureName"] = val["MeasuringPointName"]
        rule_new["uploadName"] = val["UploadName"]
        rule_new["hide"] = int(val["Hide"])
        # Upload names are deduplicated per controller (ctrlName + uploadName).
        uploadName = rule_new["ctrlName"] + rule_new["uploadName"]
        measure = rule_new["measureName"]
        ind = self.verify_kv_and_get_index(self.ucfg.measures, "name", measure)
        if ind is None:
            logger.warn("import cloud measure csv error: invalid MeasuringPointName: %s" % rule_new["measureName"])
            return self.response_error_params(count_line, "MeasuringPointName: " + rule_new["measureName"], topic, -11008)
        # The CSV group must match the group configured on the measure.
        if val["GroupName"] != mea["group"]:
            logger.warn("import cloud measure csv error: invalid GroupName: %s" % val["GroupName"])
            return self.response_error_params(count_line, "GroupName: " + val["GroupName"], topic, -11008)
        # Hide is a flag and must be 0 or 1.
        if rule_new["hide"] != 0:
            if rule_new["hide"] != 1:
                logger.warn("import cloud measure csv error: invalid Hide: %s" % val["Hide"])
                return self.response_error_params(count_line, "Hide: " + val["Hide"], topic, -11008)
        if not uploadName or uploadName in uploadNames:
            logger.warn("import cloud measure csv error: invalid UploadName: %s" % uploadName)
            return self.response_error_params(count_line, "UploadName: " + uploadName, topic, -11008)
        uploadNames.append(uploadName)
        rule_news.append(rule_new)

    # ind for cld_name was validated above, so this loop always breaks on a
    # match and "cloud" is bound to the target entry afterwards.
    for cloud in clouds:
        if cloud["name"] == cld_name:
            break

    cloud["uploadRules"].clear()
    cloud["uploadRules"] = rule_news
    self.on_cfg_editing()
    data = self.wrap_response_data("ok")
    self.build_response_data(data, topic)
def import_modbus_slave_csv(self, filename, topic, mappingTableName, is_modbusRTU=False):
    """Import a Modbus slave mapping table from a CSV file.

    Each row binds an existing controller/measuring point to a mapping
    address of the form "<funcode>X<addr>" (funcode in 0/1/3/4, addr in
    0..65535).  Validated rows replace the "measures" of the mapping table
    named *mappingTableName* under modbusRTUSlave (is_modbusRTU=True) or
    modbusSlave.  Publishes on *topic*: -11008 for a bad row, -11025 when
    no usable rows exist, "ok" on success.
    """
    count_line = 1  # header row; bumped before each data row for error reports
    val_news = list()
    # The CSV may be UTF-8 (with BOM) or GBK encoded; try UTF-8 first.
    try:
        with open(filename, "r", encoding="utf-8-sig") as f:
            csv_reader = list(csv.DictReader(f, delimiter=","))
    except Exception:
        with open(filename, "r", encoding="gbk") as f:
            csv_reader = list(csv.DictReader(f, delimiter=","))

    con_list = list()
    addr_list = list()  # [start, end] ranges already claimed, for overlap checks
    for mp in csv_reader:
        count_line += 1
        val = dict()
        if "." in mp["Controller Name"] or "/" in mp["Controller Name"]:
            logger.warn("import modbus slave csv error: invalid ControllerName: %s" % mp["Controller Name"])
            return self.response_error_params(count_line, "Controller Name have '.' or '/': " + mp["Controller Name"], topic, -11008)
        # The referenced controller and measuring point must already exist.
        ctr = self.find_ctl_name(mp["Controller Name"])
        if not ctr:
            logger.warn("import modbus slave csv error: invalid Controller Name: %s" % mp["Controller Name"])
            return self.response_error_params(count_line, "Controller Name: " + mp["Controller Name"], topic, -11008)
        mea = self.find_measure_by_name(mp["Controller Name"], mp["Measuring Point Name"])
        if not mea:
            logger.warn("import modbus slave csv error: invalid Measuring Point Name: %s" % mp["Measuring Point Name"])
            return self.response_error_params(count_line, "Measuring Point Name: " + mp["Measuring Point Name"], topic, -11008)
        if "_Id" in mp:
            val["_id"] = mp["_Id"]
        # BUGFIX: the decompiled code put everything below inside an "else:"
        # of the _Id check, leaving mea_name/ctl_name unbound whenever a row
        # carried an _Id column.  These assignments are unconditional.
        mea_name = mea["name"]
        ctl_name = ctr["name"]
        data_type = mp["Data Type"]
        start_addr = mp["Start Mapping Address"]
        val["measureName"] = mea_name
        val["ctrlName"] = ctl_name
        val["readWrite"] = mp["readWrite"]
        # Integer data types carry an enable-bit flag that must agree with
        # the one configured on the measure.
        if data_type in [Byte, Sint, Word, Int, Dword, Dint, Bcd, Bcd32, Ulong,
                         Long]:
            if "EnableBit" not in mp:
                val["enableBit"] = mea["enableBit"]
            else:
                if int(mp["EnableBit"]) != mea["enableBit"]:
                    logger.warn("import modbus slave csv error: invalid EnableBit: %s" % str(mp["EnableBit"]))
                    return self.response_error_params(count_line, "EnableBit: " + str(mp["EnableBit"]), topic, -11008)
                val["enableBit"] = int(mp["EnableBit"])
        # BUGFIX: this used to be an "elif" chained to the data-type check,
        # so enableBit stayed unbound for integer types and crashed below.
        if "enableBit" in val:
            enableBit = val["enableBit"]
        else:
            enableBit = 0
        val["dataType"] = data_type
        # Mapping (northbound) data type defaults to BIT when the enable bit
        # is set, otherwise to the southbound data type.
        if "Mapping Data Type" in mp:
            north_data_type = mp["Mapping Data Type"]
        else:
            if "enableBit" in val:
                if val["enableBit"] == 1:
                    north_data_type = "BIT"
                else:
                    north_data_type = data_type
            else:
                north_data_type = data_type
        val["northDataType"] = north_data_type
        if ctl_name not in con_list:
            con_list.append(ctl_name)
        # Parse "<funcode>X<addr>": one function-code digit, an 'X'
        # separator, then the numeric register address.
        if "X" in start_addr:
            ind = start_addr.index("X")
            # BUGFIX: guard against 'X' at position 0, which previously left
            # funcode/addr unbound.
            if ind < 1:
                logger.warn("import modbus slave csv error: invalid Start Mapping Address: %s" % start_addr)
                return self.response_error_params(count_line, "Start Mapping Address: " + start_addr, topic, -11008)
            funcode = start_addr[ind - 1]
            if funcode not in ('0', '1', '3', '4'):
                logger.warn("import modbus slave csv error: invalid Start Mapping Address: %s" % start_addr)
                return self.response_error_params(count_line, "Start Mapping Address: " + start_addr, topic, -11008)
            # Register functions 3/4 cannot address a single enable bit.
            if funcode in ('3', '4'):
                if enableBit == 1:
                    logger.warn("import modbus slave csv error: invalid EnableBit: %s" % str(enableBit))
                    return self.response_error_params(count_line, "EnableBit is 1 but func in [3, 4]", topic, -11008)
            # BUGFIX: was "start_addr[(ind + 1)[:None]]", which slices an int
            # and raises TypeError; take the substring after the 'X'.
            addr = start_addr[ind + 1:]
            if addr.isdigit():
                if int(addr) < 0 or int(addr) > 65535:
                    logger.warn("import modbus slave csv error: invalid Start Mapping Address: %s" % start_addr)
                    return self.response_error_params(count_line, "Start Mapping Address: " + start_addr, topic, -11008)
            else:
                logger.warn("import modbus slave csv error: invalid Start Mapping Address: %s" % start_addr)
                return self.response_error_params(count_line, "Start Mapping Address: " + start_addr, topic, -11008)
        else:
            logger.warn("import modbus slave csv error: invalid Start Mapping Address: %s" % start_addr)
            return self.response_error_params(count_line, "Start Mapping Address: " + start_addr, topic, -11008)
        start_addr = addr
        val["startMapAddr"] = start_addr
        # Reject duplicate controller/measure pairs within the import.
        for vn in val_news:
            if vn["ctrlName"] == val["ctrlName"] and vn["measureName"] == val["measureName"]:
                logger.warn("import modbus slave csv error: invalid measureName: %s" % val["measureName"])
                return self.response_error_params(count_line, "Repeated measureName: " + val["measureName"], topic, -11008)

        # computing_end_addr publishes its own error response and returns
        # None on failure.
        end_addr = self.computing_end_addr(ctl_name, mea_name, funcode, start_addr, north_data_type, enableBit, count_line, topic)
        if end_addr is None:
            return
        error_val = self.verify_modbus_import_params(val, funcode)
        if error_val != NONE:
            logger.warn("import modbus slave csv error: invalid: %s" % error_val)
            return self.response_error_params(count_line, error_val, topic, -11008)
        # Normalize both endpoints to "<funcode>" + zero-padded 4-digit addr
        # so ranges compare lexicographically.
        if len(str(start_addr)) < 4:
            start_addr = start_addr.zfill(4)
        start_addr = funcode + start_addr
        val["startMapAddr"] = start_addr
        if len(str(end_addr)) < 4:
            end_addr = end_addr.zfill(4)
        end_addr = funcode + end_addr
        val["endMapAddr"] = end_addr
        # BUGFIX: the decompiled overlap check fired whenever the start was
        # OUTSIDE an existing range; a conflict exists when either endpoint
        # falls inside one.
        for current_addr in addr_list:
            if current_addr[0] <= start_addr <= current_addr[1] or current_addr[0] <= end_addr <= current_addr[1]:
                logger.warn("import modbus slave csv error: invalid Overlapping start_addr: %s" % start_addr)
                return self.response_error_params(count_line, "Overlapping start_addr: " + start_addr, topic, -11008)

        addr_list.append([start_addr, end_addr])
        val_news.append(val)

    if len(val_news) == 0:
        logger.warn("Unuseful data")
        data = dict(zip(self.ERROR_KEYS, self.ERROR_VALUES_DICT[-11025]))
        return self.build_response_data(data, topic)
    else:
        simple_config = dict()
        if is_modbusRTU:
            mappingTableKey, mappingTable = self.find_modbusSlave_mappingTable_by_name(mappingTableName, True)
            if not (mappingTable and mappingTableKey):
                logger.warn("import modbus slave csv error: invalid mappingTableName: %s" % mappingTableName)
                return self.response_error_params(count_line, "mappingTableName: " + mappingTableName, topic, -11008)
            simple_config["modbusRTUSlave"] = dict()
            simple_config["modbusRTUSlave"]["mappingTable"] = self.schema.simple_config["device_supervisor"]["modbusRTUSlave"]["mappingTable"]
            simple_config["modbusRTUSlave"]["mappingTable"][mappingTableKey] = {'name': mappingTable["name"],
                                                                                'slaveAddr': mappingTable["slaveAddr"],
                                                                                '_id': mappingTable["_id"],
                                                                                'measures': collections.OrderedDict()}
            device_supervisor = self.schema.simple_config["device_supervisor"]
            device_supervisor["modbusRTUSlave"]["mappingTable"][mappingTableKey]["measures"].clear()
            for i in range(0, len(val_news)):
                mea = val_news[i]
                if "_id" in mea:
                    mea_key = mea["_id"]
                else:
                    mea_key = simple_config_generate_array_key(i)
                    mea["_id"] = mea_key
                simple_config["modbusRTUSlave"]["mappingTable"][mappingTableKey]["measures"][mea_key] = mea
                self.schema.simple_config_indexs.add_index(typ="modbusRTUSlaveMappingTable", key=(mea["measureName"]),
                                                           value=mea_key)

        else:
            mappingTableKey, mappingTable = self.find_modbusSlave_mappingTable_by_name(mappingTableName, False)
            # BUGFIX: the decompiled source degenerated this guard into an
            # "and/or" expression followed by an unconditional return, which
            # made the TCP branch always fail.  (The log label saying "RTU"
            # here mirrors the original strings, which look swapped.)
            if not (mappingTable and mappingTableKey):
                logger.warn("import modbus RTU slave csv error: invalid mappingTableName: %s" % mappingTableName)
                return self.response_error_params(count_line, "mappingTableName: " + mappingTableName, topic, -11008)
            simple_config["modbusSlave"] = dict()
            simple_config["modbusSlave"]["mappingTable"] = collections.OrderedDict()
            simple_config["modbusSlave"]["mappingTable"][mappingTableKey] = {'name': mappingTable["name"],
                                                                             'slaveAddr': mappingTable["slaveAddr"],
                                                                             'measures': collections.OrderedDict()}
            self.schema.simple_config["device_supervisor"]["modbusSlave"]["mappingTable"][mappingTableKey]["measures"].clear()
            for i in range(0, len(val_news)):
                mea = val_news[i]
                if "_id" in mea:
                    mea_key = mea["_id"]
                else:
                    mea_key = simple_config_generate_array_key(i)
                    mea["_id"] = mea_key
                simple_config["modbusSlave"]["mappingTable"][mappingTableKey]["measures"][mea_key] = mea
                self.schema.simple_config_indexs.add_index(typ="modbusSlaveMappingTable", key=(mea["measureName"]),
                                                           value=mea_key)

        payload = dict()
        payload.update({"device_supervisor": simple_config})
        self.schema.merge_patch(payload)
        self.on_cfg_editing()
        data = self.wrap_response_data("ok")
        self.build_response_data(data, topic)
def import_sl651_csv(self, filename, topic):
    # Import the SL651 (hydrology protocol) mapping table from a CSV file and
    # replace device_supervisor.sl651Slave.mapping_table wholesale.
    # Publishes on *topic*: -11008 for a bad row, -11025 when empty, "ok" on
    # success.
    count_line = 1  # header row; bumped before each data row for error reports
    ctls = self.ucfg.controllers
    measures = self.ucfg.measures
    val_news = list()
    # The CSV may be UTF-8 (with BOM) or GBK encoded; try UTF-8 first.
    try:
        with open(filename, "r", encoding="utf-8-sig") as f:
            csv_reader = list(csv.DictReader(f, delimiter=","))
    except Exception:
        with open(filename, "r", encoding="gbk") as f:
            csv_reader = list(csv.DictReader(f, delimiter=","))

    for mp in csv_reader:
        count_line += 1
        val = dict()
        # The referenced controller and measuring point must already exist.
        ctr = self.find_ctl_name(mp["Controller Name"])
        if not ctr:
            logger.warn("import sl651 slava csv error: invalid Controller Name: %s" % mp["Controller Name"])
            return self.response_error_params(count_line, "Controller Name: " + mp["Controller Name"], topic, -11008)
        mea = self.find_measure_by_name(mp["Controller Name"], mp["Measuring Point Name"])
        if not mea:
            logger.warn("import sl651 server csv error: invalid Measuring Point Name: %s" % mp["Measuring Point Name"])
            return self.response_error_params(count_line, "Measuring Point Name: " + mp["Measuring Point Name"], topic, -11008)
        if "_Id" in mp:
            val["_id"] = mp["_Id"]
        val["measureName"] = mp["Measuring Point Name"]
        val["ctrlName"] = mp["Controller Name"]
        val["readWrite"] = mp["readWrite"]
        val["dataType"] = mp["Data Type"]
        val["identifi"] = mp["Identifi"]
        # Integer data types carry an enable-bit flag that must agree with
        # the one configured on the measure.
        if val["dataType"] in [Byte, Sint, Word, Int, Dword, Dint, Bcd, Bcd32, Ulong, Long]:
            if "EnableBit" not in mp:
                val["enableBit"] = int(mea["enableBit"])
            else:
                if int(mp["EnableBit"]) != mea["enableBit"]:
                    logger.warn("import sl651 server csv error: invalid EnableBit: %s" % str(mp["EnableBit"]))
                    return self.response_error_params(count_line, "EnableBit: " + str(mp["EnableBit"]), topic, -11008)
                val["enableBit"] = int(mp["EnableBit"])
        # Mapping (northbound) data type defaults to BIT when the enable bit
        # is set, otherwise to the southbound data type.
        if "Mapping Data Type" in mp:
            val["northDataType"] = mp["Mapping Data Type"]
        else:
            if "enableBit" in val:
                if val["enableBit"] == 1:
                    val["northDataType"] = "BIT"
                else:
                    val["northDataType"] = val["dataType"]
            else:
                val["northDataType"] = val["dataType"]
        # Identifi must be one of the SL651 element identifiers (Chinese
        # names for telemetry quantities: water temperature, pH, flows, ...).
        if mp["Identifi"] in ('遥测站状态及报警信息', '水温', 'pH值', '溶解氧', '电导率', '浊度',
                              '高锰酸盐指数', '氧化还原电位', '氨氮', '总磷', '总氮', '交流A相电压',
                              '交流B相电压', '交流C相电压', '交流A相电流', '交流B相电流', '交流C相电流',
                              '化学需氧量(COD)', 'pH值2', '溶解氧2', '电导率2', '浊度2',
                              '高锰酸盐指数2', '氧化还原电位2', '氨氮2', '总磷2', '总氮2',
                              '化学需氧量(COD)2', 'pH值3', '溶解氧3', '电导率3', '浊度3',
                              '高锰酸盐指数3', '氧化还原电位3', '氨氮3', '总磷3', '总氮3',
                              '化学需氧量(COD)3', '进口累计流量1', '进口瞬时流量1', '进口流速1',
                              '进口累计流量2', '进口瞬时流量2', '进口流速2', '进口累计流量3', '进口瞬时流量3',
                              '进口流速3', '出口累计流量1', '出口瞬时流量1', '出口流速1', '出口累计流量2',
                              '出口瞬时流量2', '出口流速2', '出口累计流量3', '出口瞬时流量3', '出口流速3',
                              '总用电量', '信号强度', '悬浮物', '悬浮物2', '悬浮物3'):
            val["identifi"] = mp["Identifi"]
        else:
            logger.warn("import sl651 slave csv error: invalid Identifi: %s" % mp["Identifi"])
            return self.response_error_params(count_line, "identifi: " + mp["Identifi"], topic, -11008)
        ind = self.verify_kv_and_get_index(ctls, "name", val["ctrlName"])
        if ind is None:
            logger.warn("import sl651 server csv error: invalid Controller Name: %s" % val["ctrlName"])
            return self.response_error_params(count_line, "Controller Name: " + val["ctrlName"], topic, -11008)
        ind = self.verify_kv_and_get_index(measures, "name", (val["measureName"]), fk="ctrlName", fv=(val["ctrlName"]))
        if ind is None:
            logger.warn("import sl651 server csv error: invalid Measuring Point Name: %s" % val["measureName"])
            return self.response_error_params(count_line, "Measuring Point Name: " + val["measureName"], topic, -11008)
        if val["readWrite"] not in ('ro', 'rw', 'wo'):
            logger.warn("import sl651 server csv error: invalid readWrite: %s" % val["readWrite"])
            return self.response_error_params(count_line, "readWrite: " + val["readWrite"], topic, -11008)
        if val["dataType"] not in [Bool, Bit, Byte, Sint, Word, Int, Dword, Dint,
                                   Float,
                                   String, Bcd, Bcd32, Ulong, Long,
                                   Double]:
            logger.warn("import sl651 server csv error: invalid Data Type: %s" % val["dataType"])
            return self.response_error_params(count_line, "Data Type: " + val["dataType"], topic, -11008)
        if val["northDataType"] not in [Bool, Bit, Byte, Sint, Word, Int, Dword, Dint,
                                        Float,
                                        String, Bcd, Bcd32, Ulong, Long,
                                        Double]:
            logger.warn("import sl651 server csv error: invalid Mapping Data Type: %s" % val["northDataType"])
            return self.response_error_params(count_line, "Mapping Data Type: " + val["northDataType"], topic, -11008)
        # readWrite/dataType must match the configured measure.
        mea = measures[ind]
        if val["readWrite"] != mea["readWrite"]:
            logger.warn("import sl651 server csv error: invalid readWrite: %s" % val["readWrite"])
            return self.response_error_params(count_line, "readWrite: " + val["readWrite"], topic, -11008)
        if val["dataType"] != mea["dataType"]:
            logger.warn("import sl651 server csv error: invalid Data Type: %s" % val["dataType"])
            return self.response_error_params(count_line, "Data Type: " + val["dataType"], topic, -11008)
        # NOTE(review): val never gets a "typeId" key in this method, so the
        # third clause would raise KeyError once val_news is non-empty — this
        # looks like a decompilation artifact (perhaps "identifi" was meant).
        # Confirm against the original bytecode before relying on it.
        for vn in val_news:
            if vn["ctrlName"] == val["ctrlName"] and vn["measureName"] == val["measureName"] and vn["typeId"][0] == val["typeId"][0]:
                logger.warn("import sl651 slave csv error: invalid measureName: %s" % val["measureName"])
                return self.response_error_params(count_line, "Repeated measureName: " + val["measureName"], topic, -11008)

        val_news.append(val)

    if len(val_news) == 0:
        logger.warn("import sl651 slave csv error: Unuseful data")
        data = dict(zip(self.ERROR_KEYS, self.ERROR_VALUES_DICT[-11025]))
        return self.build_response_data(data, topic)
    # Rebuild the mapping table, keeping existing _id keys and generating
    # array keys for new rows, then merge the patch into the schema.
    self.ucfg.sl651Slave["mapping_table"] = val_news
    simple_config = dict()
    simple_config["sl651Slave"] = dict()
    simple_config["sl651Slave"]["mapping_table"] = collections.OrderedDict()
    self.schema.simple_config["device_supervisor"]["sl651Slave"]["mapping_table"].clear()
    for i in range(0, len(self.ucfg.sl651Slave["mapping_table"])):
        mea = self.ucfg.sl651Slave["mapping_table"][i]
        if "_id" in mea:
            mea_key = mea["_id"]
        else:
            mea_key = simple_config_generate_array_key(i)
            mea["_id"] = mea_key
        simple_config["sl651Slave"]["mapping_table"][mea_key] = mea
        self.schema.simple_config_indexs.add_index(typ="sl651Slave", key=(mea["measureName"]), value=mea_key)

    payload = dict()
    payload.update({"device_supervisor": simple_config})
    self.schema.merge_patch(payload)
    self.on_cfg_editing()
    data = self.wrap_response_data("ok")
    self.build_response_data(data, topic)
def import_hj212_csv(self, filename, topic):
    # Import the HJ212 (environmental protocol) client mapping table from a
    # CSV file and replace device_supervisor.hj212Client.mapping_table
    # wholesale.  Publishes on *topic*: -11008 for a bad row, -11025 when
    # empty, "ok" on success.
    count_line = 1  # header row; bumped before each data row for error reports
    ctls = self.ucfg.controllers
    measures = self.ucfg.measures
    val_news = list()
    # The CSV may be UTF-8 (with BOM) or GBK encoded; try UTF-8 first.
    try:
        with open(filename, "r", encoding="utf-8-sig") as f:
            csv_reader = list(csv.DictReader(f, delimiter=","))
    except Exception:
        with open(filename, "r", encoding="gbk") as f:
            csv_reader = list(csv.DictReader(f, delimiter=","))

    for mp in csv_reader:
        count_line += 1
        val = dict()
        # The referenced controller and measuring point must already exist.
        ctr = self.find_ctl_name(mp["Controller Name"])
        if not ctr:
            logger.warn("import hj212 slava csv error: invalid Controller Name: %s" % mp["Controller Name"])
            return self.response_error_params(count_line, "Controller Name: " + mp["Controller Name"], topic, -11008)
        mea = self.find_measure_by_name(mp["Controller Name"], mp["Measuring Name"])
        if not mea:
            logger.warn("import hj212 server csv error: invalid Measuring Name: %s" % mp["Measuring Name"])
            return self.response_error_params(count_line, "Measuring Name: " + mp["Measuring Name"], topic, -11008)
        if "_Id" in mp:
            val["_id"] = mp["_Id"]
        val["measureName"] = mp["Measuring Name"]
        val["ctrlName"] = mp["Controller Name"]
        val["readWrite"] = mp["readWrite"]
        val["dataType"] = mp["Data Type"]
        val["encode"] = mp["Encode"]
        val["block"] = mp["Block"]
        # Integer data types carry an enable-bit flag that must agree with
        # the one configured on the measure.
        if val["dataType"] in [Byte, Sint, Bcd, Bcd32, Word, Int, Dword, Dint, Ulong, Long]:
            if "EnableBit" not in mp:
                val["enableBit"] = int(mea["enableBit"])
            else:
                if int(mp["EnableBit"]) != mea["enableBit"]:
                    logger.warn("import hj212 server csv error: invalid EnableBit: %s" % str(mp["EnableBit"]))
                    return self.response_error_params(count_line, "EnableBit: " + str(mp["EnableBit"]), topic, -11008)
                val["enableBit"] = int(mp["EnableBit"])
        # Mapping (northbound) data type defaults to BIT when the enable bit
        # is set, otherwise to the southbound data type.
        if "Mapping Data Type" in mp:
            val["northDataType"] = mp["Mapping Data Type"]
        else:
            if "enableBit" in val:
                if val["enableBit"] == 1:
                    val["northDataType"] = "BIT"
                else:
                    val["northDataType"] = val["dataType"]
            else:
                val["northDataType"] = val["dataType"]
        if val["encode"] == "":
            logger.warn("import hj212 server csv error: invalid encode: %s" % val["encode"])
            return self.response_error_params(count_line, "encode: " + val["encode"], topic, -11008)
        if val["block"] == "":
            logger.warn("import hj212 server csv error: invalid block: %s" % val["block"])
            return self.response_error_params(count_line, "block: " + val["block"], topic, -11008)
        # The block must reference a configured hj212Client block.
        block_lists = self.schema.simple_config["device_supervisor"]["hj212Client"]["config"]["block_list"]
        haveBlock = False
        for block in list(block_lists.values()):
            if block["name"] == val["block"]:
                haveBlock = True
                break

        if not haveBlock:
            logger.warn("import hj212 server csv error: invalid block: %s" % val["block"])
            return self.response_error_params(count_line, "block: " + val["block"], topic, -11008)
        ind = self.verify_kv_and_get_index(ctls, "name", val["ctrlName"])
        if ind is None:
            logger.warn("import hj212 server csv error: invalid Controller Name: %s" % val["ctrlName"])
            return self.response_error_params(count_line, "Controller Name: " + val["ctrlName"], topic, -11008)
        ind = self.verify_kv_and_get_index(measures, "name", (val["measureName"]), fk="ctrlName", fv=(val["ctrlName"]))
        if ind is None:
            logger.warn("import hj212 server csv error: invalid Measuring Name: %s" % val["measureName"])
            return self.response_error_params(count_line, "Measuring Name: " + val["measureName"], topic, -11008)
        if val["readWrite"] not in ('ro', 'rw', 'wo'):
            logger.warn("import hj212 server csv error: invalid readWrite: %s" % val["readWrite"])
            return self.response_error_params(count_line, "readWrite: " + val["readWrite"], topic, -11008)
        if val["dataType"] not in [Bool, Bit, Byte, Sint, Word, Int, Dword, Dint,
                                   Float,
                                   String, Bcd, Bcd32, Ulong, Long, Double]:
            logger.warn("import hj212 server csv error: invalid Data Type: %s" % val["dataType"])
            return self.response_error_params(count_line, "Data Type: " + val["dataType"], topic, -11008)
        if val["northDataType"] not in [Bool, Bit, Byte, Sint, Word, Int, Dword,
                                        Dint,
                                        Float, String, Bcd, Bcd32, Ulong, Long,
                                        Double]:
            logger.warn("import hj212 server csv error: invalid Mapping Data Type: %s" % val["northDataType"])
            return self.response_error_params(count_line, "Mapping Data Type: " + val["northDataType"], topic, -11008)
        # readWrite/dataType must match the configured measure.
        mea = measures[ind]
        if val["readWrite"] != mea["readWrite"]:
            logger.warn("import hj212 server csv error: invalid readWrite: %s" % val["readWrite"])
            return self.response_error_params(count_line, "readWrite: " + val["readWrite"], topic, -11008)
        if val["dataType"] != mea["dataType"]:
            logger.warn("import hj212 server csv error: invalid Data Type: %s" % val["dataType"])
            return self.response_error_params(count_line, "Data Type: " + val["dataType"], topic, -11008)
        # Duplicate checks against rows already accepted in this import.
        # NOTE(review): the nesting of the block/encode check relative to the
        # ctrlName test was ambiguous in the decompiled source; placed at
        # loop level here (duplicate block+encode conflicts regardless of
        # controller) — confirm.  The "bacnet server csv" log text below is
        # in the original and looks copy/pasted from the bacnet importer.
        for vn in val_news:
            if vn["ctrlName"] == val["ctrlName"]:
                if vn["measureName"] == val["measureName"]:
                    logger.warn("import hj212 slave csv error: invalid measureName: %s" % val["measureName"])
                    return self.response_error_params(count_line, "Repeated measureName: " + val["measureName"], topic, -11008)
            if vn["block"] == val["block"] and vn["encode"] == val["encode"]:
                logger.warn("import bacnet server csv error: invalid addr: %s %s" % (
                    str(val["block"]), str(val["encode"])))
                return self.response_error_params(count_line, "Repeated addr: " + str(val["block"]) + str(val["encode"]), topic, -11008)

        val_news.append(val)

    if len(val_news) == 0:
        logger.warn("import hj212 slave csv error: Unuseful data")
        data = dict(zip(self.ERROR_KEYS, self.ERROR_VALUES_DICT[-11025]))
        return self.build_response_data(data, topic)
    # Rebuild the mapping table, keeping existing _id keys and generating
    # array keys for new rows, then merge the patch into the schema.
    self.ucfg.hj212Client["mapping_table"] = val_news
    simple_config = dict()
    simple_config["hj212Client"] = dict()
    simple_config["hj212Client"]["mapping_table"] = collections.OrderedDict()
    self.schema.simple_config["device_supervisor"]["hj212Client"]["mapping_table"].clear()
    for i in range(0, len(self.ucfg.hj212Client["mapping_table"])):
        mea = self.ucfg.hj212Client["mapping_table"][i]
        if "_id" in mea:
            mea_key = mea["_id"]
        else:
            mea_key = simple_config_generate_array_key(i)
            mea["_id"] = mea_key
        simple_config["hj212Client"]["mapping_table"][mea_key] = mea
        self.schema.simple_config_indexs.add_index(typ="hj212Client", key=(mea["measureName"]), value=mea_key)

    payload = dict()
    payload.update({"device_supervisor": simple_config})
    self.schema.merge_patch(payload)
    self.on_cfg_editing()
    data = self.wrap_response_data("ok")
    self.build_response_data(data, topic)
error: invalid addr: %s %s" % ( + str(val["block"]), str(val["encode"]))) + return self.response_error_params(count_line, "Repeated addr: " + str(val["block"]) + str(val["encode"]), topic, -11008) + + val_news.append(val) + + if len(val_news) == 0: + logger.warn("import hj212 slave csv error: Unuseful data") + data = dict(zip(self.ERROR_KEYS, self.ERROR_VALUES_DICT[-11025])) + return self.build_response_data(data, topic) + self.ucfg.hj212Client["mapping_table"] = val_news + simple_config = dict() + simple_config["hj212Client"] = dict() + simple_config["hj212Client"]["mapping_table"] = collections.OrderedDict() + self.schema.simple_config["device_supervisor"]["hj212Client"]["mapping_table"].clear() + for i in range(0, len(self.ucfg.hj212Client["mapping_table"])): + mea = self.ucfg.hj212Client["mapping_table"][i] + if "_id" in mea: + mea_key = mea["_id"] + else: + mea_key = simple_config_generate_array_key(i) + mea["_id"] = mea_key + simple_config["hj212Client"]["mapping_table"][mea_key] = mea + self.schema.simple_config_indexs.add_index(typ="hj212Client", key=(mea["measureName"]), value=mea_key) + + payload = dict() + payload.update({"device_supervisor": simple_config}) + self.schema.merge_patch(payload) + self.on_cfg_editing() + data = self.wrap_response_data("ok") + self.build_response_data(data, topic) + + def import_bacnet_server_csv(self, filename, topic): + count_line = 1 + ctls = self.ucfg.controllers + measures = self.ucfg.measures + val_news = list() + try: + with open(filename, "r", encoding="utf-8-sig") as f: + csv_reader = list(csv.DictReader(f, delimiter=",")) + except Exception: + with open(filename, "r", encoding="gbk") as f: + csv_reader = list(csv.DictReader(f, delimiter=",")) + + for mp in csv_reader: + count_line += 1 + val = dict() + ctr = self.find_ctl_name(mp["Control Name"]) + if not ctr: + logger.warn("import bacnet server csv error: invalid Controller Name: %s" % mp["Control Name"]) + return self.response_error_params(count_line, "Control 
Name: " + mp["Control Name"], topic, -11008) + mea = self.find_measure_by_name(mp["Control Name"], mp["Measuring Point Name"]) + if not mea: + logger.warn("import bacnet server csv error: invalid Measuring Name: %s" % mp["Measuring Point Name"]) + return self.response_error_params(count_line, "Measuring Name: " + mp["Measuring Point Name"], topic, -11008) + if "_Id" in mp: + val["_id"] = mp["_Id"] + val["measureName"] = mp["Measuring Point Name"] + val["ctrlName"] = mp["Control Name"] + val["readWrite"] = mp["Read/Write"] + val["dataType"] = mp["Data Type"] + val["objectType"] = mp["Object Type"] + val["objectInstance"] = int(mp["Instance Number"]) + if val["dataType"] in [Byte, Sint, Bcd, Bcd32, Word, Int, Dword, Dint, Ulong, Long]: + if "Enable Bit" not in mp: + val["enableBit"] = int(mea["enableBit"]) + else: + if int(mp["Enable Bit"]) != mea["enableBit"]: + logger.warn("import bacnet server csv error: invalid EnableBit: %s" % str(mp["Enable Bit"])) + return self.response_error_params(count_line, "EnableBit: " + str(mp["Enable Bit"]), topic, -11008) + val["enableBit"] = int(mp["Enable Bit"]) + if "Mapping Data Type" in mp: + val["northDataType"] = mp["Mapping Data Type"] + else: + if "enableBit" in val: + if val["enableBit"] == 1: + val["northDataType"] = "BIT" + else: + val["northDataType"] = val["dataType"] + else: + val["northDataType"] = val["dataType"] + ind = self.verify_kv_and_get_index(ctls, "name", val["ctrlName"]) + if ind is None: + logger.warn("import bacnet server csv error: invalid Controller Name: %s" % val["ctrlName"]) + return self.response_error_params(count_line, "Controller Name: " + val["ctrlName"], topic, -11008) + ind = self.verify_kv_and_get_index(measures, "name", (val["measureName"]), fk="ctrlName", fv=(val["ctrlName"])) + if ind is None: + logger.warn("import bacnet server csv error: invalid Measuring Name: %s" % val["measureName"]) + return self.response_error_params(count_line, "Measuring Name: " + val["measureName"], topic, -11008) 
+ if val["readWrite"] not in ('ro', 'rw', 'wo'): + logger.warn("import bacnet server csv error: invalid readWrite: %s" % val["readWrite"]) + return self.response_error_params(count_line, "readWrite: " + val["readWrite"], topic, -11008) + if val["dataType"] not in [Bool, Bit, Byte, Sint, Word, Int, Dword, Dint, + Float, + String, Bcd, Bcd32, Ulong, Long, Double]: + logger.warn("import bacnet server csv error: invalid Data Type: %s" % val["dataType"]) + return self.response_error_params(count_line, "Data Type: " + val["dataType"], topic, -11008) + if val["northDataType"] not in [Bool, Bit, Byte, Sint, Word, Int, Dword, Dint, + Float, + String, Bcd, Bcd32, Ulong, Long, Double]: + logger.warn("import bacnet server csv error: invalid Mapping Data Type: %s" % val["northDataType"]) + return self.response_error_params(count_line, "Mapping Data Type: " + val["northDataType"], topic, -11008) + mea = measures[ind] + if val["readWrite"] != mea["readWrite"]: + logger.warn("import bacnet server csv error: invalid readWrite: %s" % val["readWrite"]) + return self.response_error_params(count_line, "readWrite: " + val["readWrite"], topic, -11008) + if val["dataType"] != mea["dataType"]: + logger.warn("import bacnet server csv error: invalid Data Type: %s" % val["dataType"]) + return self.response_error_params(count_line, "Data Type: " + val["dataType"], topic, -11008) + for vn in val_news: + if vn["objectType"] == val["objectType"] and vn["objectInstance"] == val["objectInstance"]: + logger.warn("import bacnet server csv error: invalid addr: %s %s" % ( + str(val["objectType"]), str(val["objectInstance"]))) + return self.response_error_params(count_line, "Repeated addr: " + str(val["objectType"]) + str(val["objectInstance"]), topic, -11008) + + val_news.append(val) + + if len(val_news) == 0: + logger.warn("import bacnet slave csv error: Unuseful data") + data = dict(zip(self.ERROR_KEYS, self.ERROR_VALUES_DICT[-11025])) + return self.build_response_data(data, topic) + 
    def import_dnp3_server_csv(self, filename, topic):
        """Import the DNP3 server mapping table from a CSV file.

        Each row maps an existing measuring point onto a DNP3 point
        (addrType = object group, addr = index).  Validation mirrors the
        other *_server importers: the first invalid row aborts with error
        code -11008 naming the line and field; on success the table
        replaces ``self.ucfg.Dnp3Server["mapping_table"]`` and is merged
        into the schema's simple config.

        :param filename: path of the uploaded CSV file (utf-8-sig first,
            gbk fallback).
        :param topic: response topic for error/success responses.
        """
        # Row 1 is the header, so the first data row reports as line 2.
        count_line = 1
        ctls = self.ucfg.controllers
        measures = self.ucfg.measures
        val_news = list()
        try:
            with open(filename, "r", encoding="utf-8-sig") as f:
                csv_reader = list(csv.DictReader(f, delimiter=","))
        except Exception:
            with open(filename, "r", encoding="gbk") as f:
                csv_reader = list(csv.DictReader(f, delimiter=","))

        for mp in csv_reader:
            count_line += 1
            val = dict()
            ctr = self.find_ctl_name(mp["Controller Name"])
            if not ctr:
                logger.warn("import dnp3 server csv error: invalid Controller Name: %s" % mp["Controller Name"])
                return self.response_error_params(count_line, "Controller Name: " + mp["Controller Name"], topic, -11008)
            mea = self.find_measure_by_name(mp["Controller Name"], mp["Measuring Name"])
            if not mea:
                logger.warn("import dnp3 server csv error: invalid Measuring Name: %s" % mp["Measuring Name"])
                return self.response_error_params(count_line, "Measuring Name: " + mp["Measuring Name"], topic, -11008)
            # Preserve the exported row identity when present.
            if "_Id" in mp:
                val["_id"] = mp["_Id"]
            val["measureName"] = mp["Measuring Name"]
            val["ctrlName"] = mp["Controller Name"]
            val["readWrite"] = mp["readWrite"]
            val["dataType"] = mp["Data Type"]
            val["addrType"] = int(mp["Object Type"])
            val["addr"] = int(mp["Instance Number"])
            # Enable-bit only applies to integer-like types and must match
            # the configured measure when supplied.
            if val["dataType"] in [Byte, Sint, Bcd, Bcd32, Word, Int, Dword, Dint, Ulong, Long]:
                if "EnableBit" not in mp:
                    val["enableBit"] = int(mea["enableBit"])
                else:
                    if int(mp["EnableBit"]) != mea["enableBit"]:
                        logger.warn("import dnp3 server csv error: invalid EnableBit: %s" % str(mp["EnableBit"]))
                        return self.response_error_params(count_line, "EnableBit: " + str(mp["EnableBit"]), topic, -11008)
                    val["enableBit"] = int(mp["EnableBit"])
            # Default mapping data type: "BIT" when extracting a single bit,
            # otherwise mirror the southbound type.
            if "Mapping Data Type" in mp:
                val["northDataType"] = mp["Mapping Data Type"]
            else:
                if "enableBit" in val:
                    if val["enableBit"] == 1:
                        val["northDataType"] = "BIT"
                    else:
                        val["northDataType"] = val["dataType"]
                else:
                    val["northDataType"] = val["dataType"]
            ind = self.verify_kv_and_get_index(ctls, "name", val["ctrlName"])
            if ind is None:
                logger.warn("import dnp3 server csv error: invalid Controller Name: %s" % val["ctrlName"])
                return self.response_error_params(count_line, "Controller Name: " + val["ctrlName"], topic, -11008)
            ind = self.verify_kv_and_get_index(measures, "name", (val["measureName"]), fk="ctrlName", fv=(val["ctrlName"]))
            if ind is None:
                logger.warn("import dnp3 server csv error: invalid Measuring Name: %s" % val["measureName"])
                return self.response_error_params(count_line, "Measuring Name: " + val["measureName"], topic, -11008)
            if val["readWrite"] not in ('ro', 'rw', 'wo'):
                logger.warn("import dnp3 server csv error: invalid readWrite: %s" % val["readWrite"])
                return self.response_error_params(count_line, "readWrite: " + val["readWrite"], topic, -11008)
            if val["dataType"] not in [Bool, Bit, Byte, Sint, Word, Int, Dword, Dint, Float,
                                       String,
                                       Bcd, Bcd32, Ulong, Long, Double]:
                logger.warn("import dnp3 server csv error: invalid Data Type: %s" % val["dataType"])
                return self.response_error_params(count_line, "Data Type: " + val["dataType"], topic, -11008)
            if val["northDataType"] not in [Bool, Bit, Byte, Sint, Word, Int, Dword,
                                            Dint,
                                            Float, String, Bcd, Bcd32, Ulong, Long,
                                            Double]:
                logger.warn("import dnp3 server csv error: invalid Mapping Data Type: %s" % val["northDataType"])
                return self.response_error_params(count_line, "Mapping Data Type: " + val["northDataType"], topic, -11008)
            # readWrite / dataType must match the configured measure exactly.
            mea = measures[ind]
            if val["readWrite"] != mea["readWrite"]:
                logger.warn("import dnp3 server csv error: invalid readWrite: %s" % val["readWrite"])
                return self.response_error_params(count_line, "readWrite: " + val["readWrite"], topic, -11008)
            if val["dataType"] != mea["dataType"]:
                logger.warn("import dnp3 server csv error: invalid Data Type: %s" % val["dataType"])
                return self.response_error_params(count_line, "Data Type: " + val["dataType"], topic, -11008)
            # Reject a repeated measure on the same controller, and a repeated
            # DNP3 address anywhere in the table.
            # NOTE(review): the addr check is taken as table-wide (addresses are
            # global in one DNP3 server) — confirm against the original bytecode.
            for vn in val_news:
                if vn["ctrlName"] == val["ctrlName"]:
                    if vn["measureName"] == val["measureName"]:
                        logger.warn("import dnp3 slave csv error: invalid measureName: %s" % val["measureName"])
                        return self.response_error_params(count_line, "Repeated measureName: " + val["measureName"], topic, -11008)
                if vn["addrType"] == val["addrType"] and vn["addr"] == val["addr"]:
                    logger.warn("import dnp3 slave csv error: invalid addr: %s %s" % (str(val["addrType"]), str(val["addr"])))
                    return self.response_error_params(count_line, "Repeated addr: " + str(val["addrType"]) + str(val["addr"]), topic, -11008)

            val_news.append(val)

        if len(val_news) == 0:
            logger.warn("import dnp3 slave csv error: Unuseful data")
            data = dict(zip(self.ERROR_KEYS, self.ERROR_VALUES_DICT[-11025]))
            return self.build_response_data(data, topic)
        # Commit and rebuild the simple-config view, preserving _id keys.
        self.ucfg.Dnp3Server["mapping_table"] = val_news
        simple_config = dict()
        simple_config["Dnp3Server"] = dict()
        simple_config["Dnp3Server"]["mapping_table"] = collections.OrderedDict()
        self.schema.simple_config["device_supervisor"]["Dnp3Server"]["mapping_table"].clear()
        for i in range(0, len(self.ucfg.Dnp3Server["mapping_table"])):
            mea = self.ucfg.Dnp3Server["mapping_table"][i]
            if "_id" in mea:
                mea_key = mea["_id"]
            else:
                mea_key = simple_config_generate_array_key(i)
                mea["_id"] = mea_key
            simple_config["Dnp3Server"]["mapping_table"][mea_key] = mea
            self.schema.simple_config_indexs.add_index(typ="Dnp3Server", key=(mea["measureName"]), value=mea_key)

        payload = dict()
        payload.update({"device_supervisor": simple_config})
        self.schema.merge_patch(payload)
        self.on_cfg_editing()
        data = self.wrap_response_data("ok")
        self.build_response_data(data, topic)
    def import_iec61850_server_csv(self, filename, topic):
        """Import the IEC 61850 server mapping table from a CSV file.

        Each row maps an existing measuring point onto an IEC 61850 data
        attribute (CDC type + data-attribute reference), optionally adding
        it to a named data set.  The first invalid row aborts with error
        code -11008; on success the table replaces
        ``self.ucfg.iec61850Server["mapping_table"]`` and is merged into
        the schema's simple config.

        :param filename: path of the uploaded CSV file (utf-8-sig first,
            gbk fallback).
        :param topic: response topic for error/success responses.
        """
        # Row 1 is the header, so the first data row reports as line 2.
        count_line = 1
        ctls = self.ucfg.controllers
        measures = self.ucfg.measures
        val_news = list()
        try:
            with open(filename, "r", encoding="utf-8-sig") as f:
                csv_reader = list(csv.DictReader(f, delimiter=","))
        except Exception:
            with open(filename, "r", encoding="gbk") as f:
                csv_reader = list(csv.DictReader(f, delimiter=","))

        for mp in csv_reader:
            count_line += 1
            val = dict()
            ctr = self.find_ctl_name(mp["Controller Name"])
            if not ctr:
                logger.warn("import iec61850 server csv error: invalid Controller Name: %s" % mp["Controller Name"])
                return self.response_error_params(count_line, "Controller Name: " + mp["Controller Name"], topic, -11008)
            mea = self.find_measure_by_name(mp["Controller Name"], mp["Measuring Name"])
            if not mea:
                logger.warn("import iec61850 server csv error: invalid Measuring Name: %s" % mp["Measuring Name"])
                return self.response_error_params(count_line, "Measuring Name: " + mp["Measuring Name"], topic, -11008)
            val["measureName"] = mp["Measuring Name"]
            val["ctrlName"] = mp["Controller Name"]
            val["readWrite"] = mp["readWrite"]
            val["dataType"] = mp["Data Type"]
            val["cdcType"] = str(mp["CDC Type"])
            val["addDataSet"] = int(mp["Add Data Set"])
            # Data-set name is only required/validated when the row opts in.
            if val["addDataSet"] == 1:
                val["dataSetName"] = mp["Data Set Name"]
                if len(val["dataSetName"]) > 256:
                    logger.warn("import iec61850 server csv error: invalid Data Set Name: %s" % str(mp["Data Set Name"]))
                    return self.response_error_params(count_line, "Data Set Name: " + str(mp["Data Set Name"]), topic, -11008)
            # Common Data Class must be one of the supported IEC 61850 classes.
            if val["cdcType"] not in ('SPS', 'DPS', 'INS', 'MV', 'SPC', 'DPC',
                                      'INC', 'APC'):
                logger.warn("import iec61850 server csv error: invalid CDC Type: %s" % str(mp["CDC Type"]))
                return self.response_error_params(count_line, "CDC Type: " + str(mp["CDC Type"]), topic, -11008)
            val["daRef"] = str(mp["Data Attribute Reference"])
            # Enable-bit only applies to integer-like types here (note: no
            # Bcd/Bcd32 in this list, unlike the other importers) and must
            # match the configured measure when supplied.
            if val["dataType"] in [Byte, Sint, Word, Int, Dword, Dint, Ulong, Long]:
                if "EnableBit" not in mp:
                    val["enableBit"] = int(mea["enableBit"])
                else:
                    if int(mp["EnableBit"]) != mea["enableBit"]:
                        logger.warn("import iec61850 server csv error: invalid EnableBit: %s" % str(mp["EnableBit"]))
                        return self.response_error_params(count_line, "EnableBit: " + str(mp["EnableBit"]), topic, -11008)
                    val["enableBit"] = int(mp["EnableBit"])
            # Default mapping data type: "BIT" when extracting a single bit,
            # otherwise mirror the southbound type.
            if "Mapping Data Type" in mp:
                val["northDataType"] = mp["Mapping Data Type"]
            else:
                if "enableBit" in val:
                    if val["enableBit"] == 1:
                        val["northDataType"] = "BIT"
                    else:
                        val["northDataType"] = val["dataType"]
                else:
                    val["northDataType"] = val["dataType"]
            ind = self.verify_kv_and_get_index(ctls, "name", val["ctrlName"])
            if ind is None:
                logger.warn("import iec61850 server csv error: invalid Controller Name: %s" % val["ctrlName"])
                return self.response_error_params(count_line, "Controller Name: " + val["ctrlName"], topic, -11008)
            ind = self.verify_kv_and_get_index(measures, "name", (val["measureName"]), fk="ctrlName", fv=(val["ctrlName"]))
            if ind is None:
                logger.warn("import iec61850 server csv error: invalid Measuring Name: %s" % val["measureName"])
                return self.response_error_params(count_line, "Measuring Name: " + val["measureName"], topic, -11008)
            if val["readWrite"] not in ('ro', 'rw', 'wo'):
                logger.warn("import iec61850 server csv error: invalid readWrite: %s" % val["readWrite"])
                return self.response_error_params(count_line, "readWrite: " + val["readWrite"], topic, -11008)
            # NOTE(review): Bcd32 is accepted for northDataType below but not
            # for dataType here — confirm the asymmetry is intentional.
            if val["dataType"] not in [Bool, Bit, Byte, Sint, Word, Int, Dword, Dint, Float, String,
                                       Bcd, Ulong, Long, Double]:
                logger.warn("import iec61850 server csv error: invalid Data Type: %s" % val["dataType"])
                return self.response_error_params(count_line, "Data Type: " + val["dataType"], topic, -11008)
            if val["northDataType"] not in [Bool, Bit, Byte, Sint, Word, Int, Dword,
                                            Dint,
                                            Float, String, Bcd, Bcd32, Ulong, Long,
                                            Double]:
                logger.warn("import iec61850 server csv error: invalid Mapping Data Type: %s" % val["northDataType"])
                return self.response_error_params(count_line, "Mapping Data Type: " + val["northDataType"], topic, -11008)
            if val["addDataSet"] not in (0, 1):
                logger.warn("import iec61850 server csv error: invalid Add Data Set: %s" % mp["Add Data Set"])
                return self.response_error_params(count_line, "addDataSet: " + mp["Add Data Set"], topic, -11008)
            # readWrite / dataType must match the configured measure exactly.
            mea = measures[ind]
            if val["readWrite"] != mea["readWrite"]:
                logger.warn("import iec61850 server csv error: invalid readWrite: %s" % val["readWrite"])
                return self.response_error_params(count_line, "readWrite: " + val["readWrite"], topic, -11008)
            if val["dataType"] != mea["dataType"]:
                logger.warn("import iec61850 server csv error: invalid Data Type: %s" % val["dataType"])
                return self.response_error_params(count_line, "Data Type: " + val["dataType"], topic, -11008)
            # Reject a repeated (controller, measure) pair.
            for vn in val_news:
                if vn["ctrlName"] == val["ctrlName"] and vn["measureName"] == val["measureName"]:
                    logger.warn("import iec61850 slave csv error: invalid measureName: %s" % val["measureName"])
                    return self.response_error_params(count_line, "Repeated measureName: " + val["measureName"], topic, -11008)

            val_news.append(val)

        if len(val_news) == 0:
            logger.warn("import iec61850 slave csv error: Unuseful data")
            data = dict(zip(self.ERROR_KEYS, self.ERROR_VALUES_DICT[-11025]))
            return self.build_response_data(data, topic)
        # Commit and rebuild the simple-config view.  Unlike the bacnet/dnp3
        # importers, keys are always regenerated here (no _id preservation).
        self.ucfg.iec61850Server["mapping_table"] = val_news
        simple_config = dict()
        simple_config["iec61850Server"] = dict()
        simple_config["iec61850Server"]["mapping_table"] = collections.OrderedDict()
        self.schema.simple_config["device_supervisor"]["iec61850Server"]["mapping_table"].clear()
        for i in range(0, len(self.ucfg.iec61850Server["mapping_table"])):
            mea = self.ucfg.iec61850Server["mapping_table"][i]
            mea_key = simple_config_generate_array_key(i)
            simple_config["iec61850Server"]["mapping_table"][mea_key] = mea
            self.schema.simple_config_indexs.add_index(typ="iec61850Server", key=(mea["measureName"]), value=mea_key)

        payload = dict()
        payload.update({"device_supervisor": simple_config})
        self.schema.merge_patch(payload)
        self.on_cfg_editing()
        data = self.wrap_response_data("ok")
        self.build_response_data(data, topic)
    def import_snmp_agent_csv(self, filename, topic):
        """Import the SNMP agent mapping table from a CSV file.

        Each row maps an existing measuring point onto an OID under the
        fixed enterprise prefix ``.1.3.6.1.4.1.36153.1.2.0``.  The first
        invalid row aborts with error code -11008; on success the table
        replaces ``self.ucfg.snmpAgent["mapping_table"]`` and is merged
        into the schema's simple config.

        :param filename: path of the uploaded CSV file (utf-8-sig first,
            gbk fallback).
        :param topic: response topic for error/success responses.
        """
        # Row 1 is the header, so the first data row reports as line 2.
        count_line = 1
        ctls = self.ucfg.controllers
        measures = self.ucfg.measures
        val_news = list()
        try:
            with open(filename, "r", encoding="utf-8-sig") as f:
                csv_reader = list(csv.DictReader(f, delimiter=","))
        except Exception:
            with open(filename, "r", encoding="gbk") as f:
                csv_reader = list(csv.DictReader(f, delimiter=","))

        for mp in csv_reader:
            count_line += 1
            val = dict()
            ctr = self.find_ctl_name(mp["Controller Name"])
            if not ctr:
                logger.warn("import snmp agent csv error: invalid Controller Name: %s" % mp["Controller Name"])
                return self.response_error_params(count_line, "Controller Name: " + mp["Controller Name"], topic, -11008)
            mea = self.find_measure_by_name(mp["Controller Name"], mp["Measuring Name"])
            if not mea:
                logger.warn("import snmp agent csv error: invalid Measuring Name: %s" % mp["Measuring Name"])
                return self.response_error_params(count_line, "Measuring Name: " + mp["Measuring Name"], topic, -11008)
            val["measureName"] = mp["Measuring Name"]
            val["ctrlName"] = mp["Controller Name"]
            val["readWrite"] = mp["readWrite"]
            val["dataType"] = mp["Data Type"]
            val["oid"] = mp["OID"]
            # Enable-bit only applies to integer-like types and must match
            # the configured measure when supplied.
            if val["dataType"] in [Byte, Sint, Bcd, Bcd32, Word, Int, Dword, Dint, Ulong, Long]:
                if "EnableBit" not in mp:
                    val["enableBit"] = int(mea["enableBit"])
                else:
                    if int(mp["EnableBit"]) != mea["enableBit"]:
                        logger.warn("import snmp agent csv error: invalid EnableBit: %s" % str(mp["EnableBit"]))
                        return self.response_error_params(count_line, "EnableBit: " + str(mp["EnableBit"]), topic, -11008)
                    val["enableBit"] = int(mp["EnableBit"])
            # Default mapping data type: "BIT" when extracting a single bit,
            # otherwise mirror the southbound type.
            if "Mapping Data Type" in mp:
                val["northDataType"] = mp["Mapping Data Type"]
            else:
                if "enableBit" in val:
                    if val["enableBit"] == 1:
                        val["northDataType"] = "BIT"
                    else:
                        val["northDataType"] = val["dataType"]
                else:
                    val["northDataType"] = val["dataType"]
            ind = self.verify_kv_and_get_index(ctls, "name", val["ctrlName"])
            if ind is None:
                logger.warn("import snmp agent csv error: invalid Controller Name: %s" % val["ctrlName"])
                return self.response_error_params(count_line, "Controller Name: " + val["ctrlName"], topic, -11008)
            ind = self.verify_kv_and_get_index(measures, "name", (val["measureName"]), fk="ctrlName", fv=(val["ctrlName"]))
            if ind is None:
                logger.warn("import snmp agent csv error: invalid Measuring Name: %s" % val["measureName"])
                return self.response_error_params(count_line, "Measuring Name: " + val["measureName"], topic, -11008)
            if val["readWrite"] not in ('ro', 'rw', 'wo'):
                logger.warn("import snmp agent csv error: invalid readWrite: %s" % val["readWrite"])
                return self.response_error_params(count_line, "readWrite: " + val["readWrite"], topic, -11008)
            if val["dataType"] not in [Bool, Bit, Byte, Sint, Word, Int, Dword, Dint, Float,
                                       String,
                                       Bcd, Bcd32, Ulong, Long, Double]:
                logger.warn("import snmp agent csv error: invalid Data Type: %s" % val["dataType"])
                return self.response_error_params(count_line, "Data Type: " + val["dataType"], topic, -11008)
            if val["northDataType"] not in [Bool, Bit, Byte, Sint, Word, Int, Dword,
                                            Dint,
                                            Float, String, Bcd, Bcd32, Ulong, Long,
                                            Double]:
                logger.warn("import snmp agent csv error: invalid Mapping Data Type: %s" % val["northDataType"])
                return self.response_error_params(count_line, "Mapping Data Type: " + val["northDataType"], topic, -11008)
            # Everything before the last dot must be the fixed enterprise
            # prefix; only the final sub-identifier is row-specific.
            if str(val["oid"]).rsplit(".", 1)[0] != ".1.3.6.1.4.1.36153.1.2.0":
                logger.warn("import snmp agent csv error: invalid oid: %s" % val["oid"])
                return self.response_error_params(count_line, "oid: " + val["oid"], topic, -11008)
            # readWrite / dataType must match the configured measure exactly.
            mea = measures[ind]
            if val["readWrite"] != mea["readWrite"]:
                logger.warn("import snmp agent csv error: invalid readWrite: %s" % val["readWrite"])
                return self.response_error_params(count_line, "readWrite: " + val["readWrite"], topic, -11008)
            if val["dataType"] != mea["dataType"]:
                logger.warn("import snmp agent csv error: invalid Data Type: %s" % val["dataType"])
                return self.response_error_params(count_line, "Data Type: " + val["dataType"], topic, -11008)
            # Reject a repeated measure on the same controller and a repeated OID.
            # NOTE(review): the OID check is taken as table-wide (OIDs are global
            # in one agent) — confirm against the original bytecode.
            for vn in val_news:
                if vn["ctrlName"] == val["ctrlName"]:
                    if vn["measureName"] == val["measureName"]:
                        logger.warn("import snmp agent csv error: invalid measureName: %s" % val["measureName"])
                        return self.response_error_params(count_line, "Repeated measureName: " + val["measureName"], topic, -11008)
                if vn["oid"] == val["oid"]:
                    logger.warn("import snmp agent csv error: invalid oid: %s" % val["oid"])
                    return self.response_error_params(count_line, "Repeated oid: " + val["oid"], topic, -11008)

            val_news.append(val)

        if len(val_news) == 0:
            logger.warn("import snmp agent csv error: Unuseful data")
            data = dict(zip(self.ERROR_KEYS, self.ERROR_VALUES_DICT[-11025]))
            return self.build_response_data(data, topic)
        # Commit and rebuild the simple-config view (keys always regenerated).
        self.ucfg.snmpAgent["mapping_table"] = val_news
        simple_config = dict()
        simple_config["snmpAgent"] = dict()
        simple_config["snmpAgent"]["mapping_table"] = collections.OrderedDict()
        self.schema.simple_config["device_supervisor"]["snmpAgent"]["mapping_table"].clear()
        for i in range(0, len(self.ucfg.snmpAgent["mapping_table"])):
            mea = self.ucfg.snmpAgent["mapping_table"][i]
            mea_key = simple_config_generate_array_key(i)
            simple_config["snmpAgent"]["mapping_table"][mea_key] = mea
            self.schema.simple_config_indexs.add_index(typ="snmpAgent", key=(mea["measureName"]), value=mea_key)

        payload = dict()
        payload.update({"device_supervisor": simple_config})
        self.schema.merge_patch(payload)
        self.on_cfg_editing()
        data = self.wrap_response_data("ok")
        self.build_response_data(data, topic)
simple_config["snmpAgent"]["mapping_table"] = collections.OrderedDict() + self.schema.simple_config["device_supervisor"]["snmpAgent"]["mapping_table"].clear() + for i in range(0, len(self.ucfg.snmpAgent["mapping_table"])): + mea = self.ucfg.snmpAgent["mapping_table"][i] + mea_key = simple_config_generate_array_key(i) + simple_config["snmpAgent"]["mapping_table"][mea_key] = mea + self.schema.simple_config_indexs.add_index(typ="snmpAgent", key=(mea["measureName"]), value=mea_key) + + payload = dict() + payload.update({"device_supervisor": simple_config}) + self.schema.merge_patch(payload) + self.on_cfg_editing() + data = self.wrap_response_data("ok") + self.build_response_data(data, topic) + + def import_iec104_csvParse error at or near `COME_FROM' instruction at offset 3810_0 + + def import_iec101Slave_csv(self, filename, topic, type): + count_line = 1 + val_news = list() + in_val = list() + ctls = self.ucfg.controllers + measures = self.ucfg.measures + serlist = [] + for serverList in self.ucfg.iec101Server["serverList"]: + serlist.append(serverList["asduAddr"]) + + try: + with open(filename, "r", encoding="utf-8-sig") as f: + csv_reader = list(csv.DictReader(f, delimiter=",")) + except Exception: + with open(filename, "r", encoding="gbk") as f: + csv_reader = list(csv.DictReader(f, delimiter=",")) + + for mp in csv_reader: + count_line += 1 + val = dict() + if "." in mp["Controller Name"] or "/" in mp["Controller Name"]: + logger.warn("import iec101 server csv error: invalid ControllerName: %s" % mp["Controller Name"]) + return self.response_error_params(count_line, "Controller Name have '.' 
or '/': " + mp["Controller Name"], topic, -11008) + ctr = self.find_ctl_name(mp["Controller Name"]) + if not ctr: + logger.warn("import iec101 server csv error: invalid Controller Name: %s" % mp["Controller Name"]) + return self.response_error_params(count_line, "Controller Name: " + mp["Controller Name"], topic, -11008) + mea = self.find_measure_by_name(mp["Controller Name"], mp["Measuring Point Name"]) + if not mea: + logger.warn("import iec101 server csv error: invalid Measuring Point Name: %s" % mp["Measuring Point Name"]) + return self.response_error_params(count_line, "Measuring Point Name: " + mp["Measuring Point Name"], topic, -11008) + con_list = [] + if "_Id" in mp: + val["_id"] = mp["_Id"] + val["measureName"] = mp["Measuring Point Name"] + val["ctrlName"] = mp["Controller Name"] + val["readWrite"] = mp["readWrite"] + val["dataType"] = mp["Data Type"] + if val["dataType"] in [Byte, Sint, Word, Int, Dword, Dint, Bcd, Bcd32, Ulong, Long]: + if "EnableBit" not in mp: + val["enableBit"] = int(mea["enableBit"]) + else: + if int(mp["EnableBit"]) != mea["enableBit"]: + logger.warn("import iec101 server csv error: invalid EnableBit: %s" % str(mp["EnableBit"])) + return self.response_error_params(count_line, "EnableBit: " + str(mp["EnableBit"]), topic, -11008) + val["enableBit"] = int(mp["EnableBit"]) + if "Mapping Data Type" in mp: + val["northDataType"] = mp["Mapping Data Type"] + else: + if "enableBit" in val: + if val["enableBit"] == 1: + val["northDataType"] = "BIT" + else: + val["northDataType"] = val["dataType"] + else: + val["northDataType"] = val["dataType"] + if mp["ASDU"] != "": + val["asduAddr"] = int(mp["ASDU"]) + else: + logger.warn("import iec101 server csv error: invalid ASDU: %s" % mp["ASDU"]) + return self.response_error_params(count_line, "ASDU: " + mp["ASDU"], topic, -11008) + if mp["startIOA"] != "": + val["startMapAddr"] = int(mp["startIOA"]) + else: + logger.warn("import iec101 server csv error: invalid startIOA: %s" % mp["startIOA"]) + 
return self.response_error_params(count_line, "startIOA: " + mp["startIOA"], topic, -11008) + if val["ctrlName"] not in con_list: + con_list.append(val["ctrlName"]) + if mp["Type Id"] == "": + logger.warn("import iec101 server csv error: invalid Type Id: %s" % mp["Type Id"]) + return self.response_error_params(count_line, "Type Id: " + mp["Type Id"], topic, -11008) + id_temp = re.findall("\\d+", mp["Type Id"]) + val["typeId"] = [] + if type == "YX": + if id_temp[0] == "1": + val["typeId"].append(1) + else: + logger.warn("import iec101 server csv error: invalid YX Type Id: %s" % mp["Type Id"]) + return self.response_error_params(count_line, "Type Id: " + mp["Type Id"], topic, -11008) + if type == "YC": + if id_temp[0] == "13": + val["typeId"].append(7) + else: + logger.warn("import iec101 server csv error: invalid YC Type Id: %s" % mp["Type Id"]) + return self.response_error_params(count_line, "Type Id: " + mp["Type Id"], topic, -11008) + elif type == "YK": + if id_temp[0] == "45": + val["typeId"].append(10) + else: + logger.warn("import iec101 server csv error: invalid YK Type Id: %s" % mp["Type Id"]) + return self.response_error_params(count_line, "Type Id: " + mp["Type Id"], topic, -11008) + else: + val["typeId"].append(int(id_temp[0])) + val["endMapAddr"] = mp["ASDU"] + " " + mp["startIOA"] + ind = self.verify_kv_and_get_index(ctls, "name", val["ctrlName"]) + if ind is None: + logger.warn("import iec101 server csv error: invalid Controller Name: %s" % val["ctrlName"]) + return self.response_error_params(count_line, "Controller Name: " + val["ctrlName"], topic, -11008) + ind = self.verify_kv_and_get_index(measures, "name", (val["measureName"]), fk="ctrlName", fv=(val["ctrlName"])) + if ind is None: + logger.warn("import iec101 server csv error: invalid Measuring Point Name: %s" % val["measureName"]) + return self.response_error_params(count_line, "Measuring Point Name: " + val["measureName"], topic, -11008) + if val["readWrite"] not in ('ro', 'rw', 'wo'): + 
logger.warn("import iec101 server csv error: invalid readWrite: %s" % val["readWrite"]) + return self.response_error_params(count_line, "readWrite: " + val["readWrite"], topic, -11008) + if val["dataType"] not in [Bool, Bit, Byte, Sint, Word, Int, Dword, Dint, + Float, String, + Bcd, Bcd32, + Ulong, + Long, Double]: + logger.warn("import iec101 server csv error: invalid Data Type: %s" % val["dataType"]) + return self.response_error_params(count_line, "Data Type: " + val["dataType"], topic, -11008) + if val["northDataType"] not in [Bool, Bit, Byte, Sint, Word, Int, Dword, Dint, + Float, + String, Bcd, + Bcd32, Ulong, + Long, Double]: + logger.warn("import iec101 server csv error: invalid Mapping Data Type: %s" % val["northDataType"]) + return self.response_error_params(count_line, "Mapping Data Type: " + val["northDataType"], topic, -11008) + mea = measures[ind] + if val["readWrite"] != mea["readWrite"]: + logger.warn("import iec101 server csv error: invalid readWrite: %s" % val["readWrite"]) + return self.response_error_params(count_line, "readWrite: " + val["readWrite"], topic, -11008) + if val["dataType"] != mea["dataType"]: + logger.warn("import iec101 server csv error: invalid Data Type: %s" % val["dataType"]) + return self.response_error_params(count_line, "Data Type: " + val["dataType"], topic, -11008) + if type == "YC": + if val["dataType"] not in [Byte, Sint, Word, Int, Dword, Dint, Float, + Double, + Bcd, + Bcd32, + Ulong, + Long]: + logger.warn("import iec101 server csv error: invalid Data Type: %s" % val["dataType"]) + return self.response_error_params(count_line, "Data Type: " + val["dataType"], topic, -11008) + elif val["northDataType"] not in [Byte, Sint, Word, Int, Dword, Dint, Float, + Double, + Bcd, + Bcd32, + Ulong, + Long]: + logger.warn("import iec101 server csv error: invalid Mapping Data Type: %s" % val["northDataType"]) + return self.response_error_params(count_line, "Mapping Data Type: " + val["northDataType"], topic, -11008) + if 
"enableBit" in val and val["enableBit"] == 1: + logger.warn("import iec101 server csv error: invalid enableBit: %d" % val["enableBit"]) + return self.response_error_params(count_line, "EnableBit is 1", topic, -11008) + if val["readWrite"] not in ('ro', 'rw'): + logger.warn("import iec101 server csv error: invalid Read Write: %s" % val["readWrite"]) + return self.response_error_params(count_line, "Read Write: " + val["readWrite"], topic, -11008) + elif type == "YX" or type == "YK": + if "enableBit" in val: + if val["enableBit"] != 1: + logger.warn("import iec101 server csv error: invalid Data Type: %s" % val["dataType"]) + return self.response_error_params(count_line, "Data Type: " + val["dataType"], topic, -11008) + elif val["dataType"] not in [Bool, Bit]: + logger.warn("import iec101 server csv error: invalid Data Type: %s" % val["dataType"]) + return self.response_error_params(count_line, "Data Type: " + val["dataType"], topic, -11008) + if val["northDataType"] not in [Bool, Bit]: + logger.warn("import iec101 server csv error: invalid Mapping Data Type: %s" % val["northDataType"]) + return self.response_error_params(count_line, "Mapping Data Type: " + val["northDataType"], topic, -11008) + if val["readWrite"] not in ('ro', 'wo', 'rw'): + logger.warn("import iec101 server csv error: invalid Read Write: %s" % val["readWrite"]) + return self.response_error_params(count_line, "Read Write: " + val["readWrite"], topic, -11008) + if val["asduAddr"] not in serlist: + logger.warn("import iec101 server csv error: invalid ASDU: %s" % val["asduAddr"]) + return self.response_error_params(count_line, "ASDU: " + str(val["asduAddr"]), topic, -11008) + link = mp["Type Id"] + " " + mp["startIOA"] + for mp in in_val: + if mp == link: + logger.warn("import iec101 server csv error: invalid link: %s" % link) + return self.response_error_params(count_line, "Overlapping: " + link, topic, -11008) + + in_val.append(link) + for vn in val_news: + if vn["ctrlName"] == val["ctrlName"] and 
vn["measureName"] == val["measureName"] and vn["typeId"][0] == val["typeId"][0]: + logger.warn("import iec101 server csv error: invalid measureName: %s" % val["measureName"]) + return self.response_error_params(count_line, "Repeated measureName: " + val["measureName"], topic, -11008) + + val_news.append(val) + + if len(val_news) == 0: + logger.warn("import iec101 server csv error: Unuseful data") + data = dict(zip(self.ERROR_KEYS, self.ERROR_VALUES_DICT[-11025])) + return self.build_response_data(data, topic) + self.ucfg.iec101Server["mappingTable"][type] = val_news + simple_config = dict() + simple_config["iec101Server"] = dict() + simple_config["iec101Server"]["mappingTable"] = dict() + simple_config["iec101Server"]["mappingTable"][type] = collections.OrderedDict() + self.schema.simple_config["device_supervisor"]["iec101Server"]["mappingTable"][type].clear() + for i in range(0, len(self.ucfg.iec101Server["mappingTable"][type])): + mea = self.ucfg.iec101Server["mappingTable"][type][i] + if "_id" in mea: + mea_key = mea["_id"] + else: + mea_key = simple_config_generate_array_key(i) + mea["_id"] = mea_key + simple_config["iec101Server"]["mappingTable"][type][mea_key] = mea + self.schema.simple_config_indexs.add_index(typ="iec101Server", key=(mea["measureName"]), value=mea_key) + + payload = dict() + payload.update({"device_supervisor": simple_config}) + self.schema.merge_patch(payload) + self.on_cfg_editing() + data = self.wrap_response_data("ok") + self.build_response_data(data, topic) + + def import_iec104Client_csv(self, filename, topic, type): + measures = self.ucfg.measures + count_line = 1 + try: + with open(filename, "r", encoding="utf-8-sig") as f: + csv_reader = list(csv.DictReader(f, delimiter=",")) + except Exception: + with open(filename, "r", encoding="gbk") as f: + csv_reader = list(csv.DictReader(f, delimiter=",")) + + simple_config = collections.OrderedDict() + for i in range(0, len(csv_reader)): + count_line += 1 + val = dict() + if "." 
in csv_reader[i]["Controller Name"] or "/" in csv_reader[i]["Controller Name"]: + logger.warn("import iec104 client csv error: invalid ControllerName: %s" % csv_reader[i]["Controller Name"]) + return self.response_error_params(count_line, "Controller Name have '.' or '/': " + csv_reader[i]["Controller Name"], topic, -11008) + ctr = self.find_ctl_name(csv_reader[i]["Controller Name"]) + if not ctr: + logger.warn("import iec104 client csv error: invalid Controller Name: %s" % csv_reader[i]["Controller Name"]) + return self.response_error_params(count_line, "Controller Name: " + csv_reader[i]["Controller Name"], topic, -11008) + mea = self.find_measure_by_name(csv_reader[i]["Controller Name"], csv_reader[i]["Measuring Point Name"]) + if not mea: + logger.warn("import iec104 client csv error: invalid Measuring Point Name: %s" % csv_reader[i]["Measuring Point Name"]) + return self.response_error_params(count_line, "Measuring Point Name: " + csv_reader[i]["Measuring Point Name"], topic, -11008) + if "_Id" in csv_reader[i]: + val["_id"] = csv_reader[i]["_Id"] + key = val["_id"] + else: + key = simple_config_generate_array_key(i) + val["_id"] = key + val["measureName"] = csv_reader[i]["Measuring Point Name"] + val["ctrlName"] = csv_reader[i]["Controller Name"] + val["dataType"] = csv_reader[i]["Data Type"] + val["readWrite"] = csv_reader[i]["Read Write"] + if val["dataType"] in [Byte, Sint, Word, Int, Dword, Dint, Bcd, Bcd32, Ulong, Long]: + if "EnableBit" not in csv_reader[i]: + val["enableBit"] = int(mea["enableBit"]) + elif int(csv_reader[i]["EnableBit"]) != mea["enableBit"]: + logger.warn("import iec104 client csv error: invalid EnableBit: %s" % str(csv_reader[i]["EnableBit"])) + return self.response_error_params(count_line, "EnableBit: " + str(csv_reader[i]["EnableBit"]), topic, -11008) + val["enableBit"] = int(csv_reader[i]["EnableBit"]) + elif type == "YC": + if val["dataType"] not in [Byte, Sint, Word, Int, Dword, Dint, Float, Double, Bcd, + Bcd32, Ulong, Long]: + 
logger.warn("import iec104 client csv error: invalid Data Type: %s" % val["dataType"]) + return self.response_error_params(count_line, "Data Type: " + val["dataType"], topic, -11008) + if "enableBit" in val: + if val["enableBit"] == 1: + logger.warn("import iec104 client csv error: invalid enableBit: %d" % val["enableBit"]) + return self.response_error_params(count_line, "EnableBit is 1", topic, -11008) + if val["readWrite"] not in ('ro', 'wo', 'rw'): + logger.warn("import iec104 client csv error: invalid Read Write: %s" % val["readWrite"]) + return self.response_error_params(count_line, "Read Write: " + val["readWrite"], topic, -11008) + elif type == "YX": + if "enableBit" in val: + if val["enableBit"] != 1: + logger.warn("import iec104 client csv error: invalid Data Type: %s" % val["dataType"]) + return self.response_error_params(count_line, "Data Type: " + val["dataType"], topic, -11008) + elif val["dataType"] not in [Bool, Bit]: + logger.warn("import iec104 client csv error: invalid Data Type: %s" % val["dataType"]) + return self.response_error_params(count_line, "Data Type: " + val["dataType"], topic, -11008) + if val["readWrite"] not in ('ro', 'wo', 'rw'): + logger.warn("import iec104 client csv error: invalid Read Write: %s" % val["readWrite"]) + return self.response_error_params(count_line, "Read Write: " + val["readWrite"], topic, -11008) + elif type == "YK": + if val["dataType"] not in [Bool, Bit, Byte, Sint, Word, Int, Dword, Dint, Float, + Double, Bcd, + Bcd32, + Ulong, Long]: + logger.warn("import iec104 client csv error: invalid Data Type: %s" % val["dataType"]) + return self.response_error_params(count_line, "Data Type: " + val["dataType"], topic, -11008) + if val["readWrite"] not in ('ro', 'wo', 'rw'): + logger.warn("import iec104 client csv error: invalid Read Write: %s" % val["readWrite"]) + return self.response_error_params(count_line, "Read Write: " + val["readWrite"], topic, -11008) + index = self.verify_kv_and_get_index(measures, "name", 
(val["measureName"]), fk="ctrlName", fv=(val["ctrlName"])) + if index is None: + error_val = "ctrlName: " + val["ctrlName"] + " measureName: " + val["measureName"] + logger.warn("import iec104 client csv error: invalid: %s" % error_val) + return self.response_error_params(count_line, error_val, topic, -11008) + if val["dataType"] != measures[index]["dataType"]: + logger.warn("import iec104 client csv error: invalid Data Type: %s" % val["dataType"]) + return self.response_error_params(count_line, "Data Type: " + val["dataType"], topic, -11008) + simple_config[key] = val + + self.schema.simple_config["device_supervisor"]["iec104Client"]["mapping_table"][type] = simple_config + self.on_cfg_editing() + data = self.wrap_response_data("ok") + self.build_response_data(data, topic) + + def import_lables_csv(self, filename, topic): + count_line = 1 + lable_news = list() + if len(self.ucfg.labels) >= 2: + del self.ucfg.labels[2[:None]] + try: + with open(filename, "r", encoding="utf-8-sig") as f: + csv_reader = list(csv.DictReader(f, delimiter=",")) + except Exception: + with open(filename, "r", encoding="gbk") as f: + csv_reader = list(csv.DictReader(f, delimiter=",")) + + for mp in csv_reader: + val = dict() + count_line += 1 + if "_Id" in mp: + val["_id"] = mp["_Id"] + val["key"] = mp["key"] + val["value"] = mp["value"] + if not val["key"] == "SN": + if val["key"] == "MAC": + logger.warn("import lables csv error: invalid key: %s" % val["key"]) + return self.response_error_params(count_line, "key: " + val["key"], topic, -11008) + if len(str(val["key"])) > 256: + logger.warn("import lables csv error: invalid key: %s" % val["key"]) + return self.response_error_params(count_line, "key: " + val["key"], topic, -11008) + if len(str(val["value"])) > 1024: + logger.warn("import lables csv error: invalid value: %s" % val["value"]) + return self.response_error_params(count_line, "value: " + val["value"], topic, -11008) + for vn in lable_news: + if vn["key"] == val["key"]: + 
error_val = "Repeated key: " + val["key"] + logger.warn("import lables csv error: invalid: %s" % error_val) + return self.response_error_params(count_line, error_val, topic, -11008) + + lable_news.append(val) + + if len(lable_news) == 0: + logger.warn("Unuseful data") + data = dict(zip(self.ERROR_KEYS, self.ERROR_VALUES_DICT[-11025])) + return self.build_response_data(data, topic) + self.ucfg.labels[2[:None]] = lable_news + simple_config = dict() + self.schema.simple_config["device_supervisor"]["labels"].clear() + simple_config_map_array(self.ucfg.labels, simple_config, "labels", self.schema.simple_config_indexs, "key") + payload = dict() + payload.update({"device_supervisor": simple_config}) + self.schema.merge_patch(payload) + self.on_cfg_editing() + data = self.wrap_response_data("ok") + self.build_response_data(data, topic) + + def import_genericfuncs_csv(self, filename, topic): + """ + """ + with open(filename, "r", encoding="utf-8-sig") as f: + info = json.load(f) + genfuns = info["genericFuncs"] + if len(genfuns) == 0: + logger.warn("Unuseful data") + data = dict(zip(self.ERROR_KEYS, self.ERROR_VALUES_DICT[-11025])) + return self.build_response_data(data, topic) + for gf in genfuns: + res_data = self.verify_gen_post(gf) + if res_data and "error" in res_data: + logger.warn("import genericfuncs csv error: invalid: %s" % res_data["params"]) + return self.build_response_data(res_data, topic) + + self.schema.simple_config["device_supervisor"]["quickfaas"]["genericFuncs"].clear() + self.schema.simple_config["device_supervisor"]["quickfaas"]["genericFuncs"] = genfuns + self.on_cfg_editing() + data = self.wrap_response_data("ok") + self.build_response_data(data, topic) + + def on_export_file(self, topic, payload): + try: + logger.debug("master receive topic: %s , payload: %s" % ( + topic, payload)) + payload = self.payload_transfer(payload) + if not os.path.exists(CONFIG_TMP_PATH): + os.makedirs(CONFIG_TMP_PATH, 493, exist_ok=True) + else: + file_type = 
payload["fileType"] + if file_type == "measure": + ctr_name = payload["ctrlName"] + self.export_measure_csv(ctr_name, topic) + else: + if file_type == "group": + self.export_group_csv(topic) + else: + if file_type == "history_data": + return + elif file_type == "alarm": + self.export_alarm_csv(topic) + else: + if file_type == "alarmLabel": + self.export_alarmLables_csv(topic) + else: + if file_type == "cloud": + cld_name = payload["cloudName"] + self.export_cloud_csv(cld_name, topic) + else: + if file_type == "cloud_measure": + cld_name = payload["cloudName"] + self.export_cloud_measure_csv(cld_name, topic) + else: + if file_type == "modbus_slave": + mappingTableName = payload["mappingTableName"] + self.export_modbus_slave_csv(topic, mappingTableName, False) + else: + if file_type == "modbusRTU_slave": + mappingTableName = payload["mappingTableName"] + self.export_modbus_slave_csv(topic, mappingTableName, True) + else: + if file_type == "iec104": + self.export_iec104_csv(topic) + else: + if file_type == "opcua_pubsub_group": + self.export_opcua_pubsub_group_csv(topic) + else: + if file_type == "iec101Slave_YX": + self.export_iec101Slave_csv(topic, "YX") + else: + if file_type == "iec101Slave_YC": + self.export_iec101Slave_csv(topic, "YC") + else: + if file_type == "iec101Slave_YK": + self.export_iec101Slave_csv(topic, "YK") + else: + if file_type == "sl651Slave": + self.export_sl651_csv(topic) + else: + if file_type == "hj212Client": + self.export_hj212_csv(topic) + else: + if file_type == "bacnetServer": + self.export_bacnet_server_csv(topic) + else: + if file_type == "Dnp3Server": + self.export_dnp3_server_csv(topic) + else: + if file_type == "iec61850Server": + self.export_iec61850_server_csv(topic) + else: + if file_type == "snmpAgent": + self.export_snmp_agent_csv(topic) + else: + if file_type == "iec104Client_YX": + self.export_iec104Client_csv(topic, "YX") + else: + if file_type == "iec104Client_YC": + self.export_iec104Client_csv(topic, "YC") + else: + if 
file_type == "iec104Client_YK": + self.export_iec104Client_csv(topic, "YK") + else: + if file_type == "labels": + self.export_labels_csv(topic) + else: + if file_type == "genfuns": + self.export_genericfuncs_csv(topic) + else: + if file_type == "pkgdbg": + ctr_name = payload["ctrlName"] + self.export_pkgdbg_log(ctr_name, topic) + else: + if file_type == "global_config": + self.export_global_config(topic) + else: + if file_type == "log": + request_id = payload["id"] + self.export_log(topic, request_id) + else: + raise ValueError("Unknown fileType %s" % file_type) + except Exception as e: + try: + logger.warn("Response error: %s" % e) + self.pub_invalid_response_by_params(topic, -1003) + finally: + e = None + del e + + def export_measure_csv(self, ctr_name, topic): + controllers = list(self.schema.simple_config["device_supervisor"]["controllers"].values()) + ind = self.verify_kv_and_get_index(controllers, "name", ctr_name) + if ind is None: + data = dict(zip(self.ERROR_KEYS, self.ERROR_VALUES_DICT[-11014])) + return self.build_response_data(data, topic) + schema_meas = self.schema.simple_config["device_supervisor"]["measures"][ctr_name] + csv_file = CONFIG_TMP_PATH + "/" + MEASURE_CSV_FILE_NAME + with open(csv_file, "w+", encoding="utf-8-sig") as f: + csv_headers = [ + 'MeasuringPointName', 'ControllerName', 'GroupName', + 'UploadType', 'DeadZonePercent', + 'DataType', + 'ArrayIndex', 'EnableBit', 'BitIndex', 'reverseBit', + 'Address', 'Decimal', 'Len', + 'CodeType', + 'ReadWrite', 'Unit', 'Description', 'Transform Type', + 'MaxValue', 'MinValue', + 'MaxScale', + 'MinScale', 'Gain', 'Offset', 'startBit', 'endBit', + 'Pt', 'Ct', 'Mapping_table', + 'TransDecimal', + 'bitMap', 'msecSample', 'storageLwTSDB', 'DataEndianReverse', + 'ReadOffset', + 'ReadLength', + 'WriteOffset', 'WriteLength', 'DataParseMethod', 'BitId', + 'pollCycle'] + f_csv = csv.DictWriter(f, csv_headers, dialect="excel") + f_csv.writeheader() + for val in list(schema_meas.values()): + if 
val["ctrlName"] == ctr_name: + mea = dict() + mea["MeasuringPointName"] = val["name"] + mea["ControllerName"] = val["ctrlName"] + mea["UploadType"] = val["uploadType"] + if val["uploadType"] in ('periodic', 'onChange'): + mea["GroupName"] = val["group"] + mea["DataType"] = val["dataType"] + if val["dataType"] in [Byte, Sint, Word, Int, Dword, Dint, Bcd, Bcd32, Ulong, + Long]: + mea["EnableBit"] = val["enableBit"] + if mea["EnableBit"] == 1: + mea["BitIndex"] = val["bitIndex"] + mea["bitMap"] = val["bitMap"] + mea["reverseBit"] = val["reverseBit"] + if val["dataType"] == Bit: + mea["reverseBit"] = val["reverseBit"] + mea["ReadWrite"] = val["readWrite"] + if val["uploadType"] == OnChange: + if "enableBit" in val: + if val["enableBit"] == 0: + mea["DeadZonePercent"] = val["deadZonePercent"] + elif val["dataType"] in [Float, Double]: + mea["DeadZonePercent"] = val["deadZonePercent"] + addr = str(val["addr"]) + if controllers[ind]["protocol"] in [MbRtu, MbTcp, MbAscii, MbRtuOverTcp, MbRtuSlave]: + if addr[0] == "0": + addr = "0X" + addr[1[:None]] + else: + if addr[0] == "1": + addr = "1X" + addr[1[:None]] + else: + if addr[0] == "3": + addr = "3X" + addr[1[:None]] + else: + if addr[0] == "4": + addr = "4X" + addr[1[:None]] + if controllers[ind]["protocol"] in [BacMstp, BacBip]: + addr_list = addr.split(",") + object_type = self.bacnet_object_type_num_to_letter(addr_list[0]) + addr = addr.replace(addr_list[0], object_type, 1) + else: + mea["Address"] = addr + if controllers[ind]["protocol"] in [Easycom, EasyEthernet]: + mea["DataEndianReverse"] = val["data_endian_reverse"] + if mea["ReadWrite"] in ('rw', 'ro'): + mea["ReadOffset"] = val["rd_offset"] + mea["ReadLength"] = val["rd_length"] + if mea["ReadWrite"] in ('rw', 'wo'): + mea["WriteOffset"] = val["wr_offset"] + mea["WriteLength"] = val["wr_length"] + mea["DataParseMethod"] = val["data_parse_method"] + if val["dataType"] == Bit: + mea["BitId"] = val["bit_id"] + if val["dataType"] in [Float, Double]: + mea["Decimal"] 
= val["decimal"] if "decimal" in val else "" + elif val["dataType"] == String: + mea["Len"] = val["len"] if "len" in val else 1 + mea["CodeType"] = val["codeType"] if "codeType" in val else "UTF-8" + else: + mea["Unit"] = val["unit"] + mea["Description"] = val["desc"] + trsform = val["transformType"] + if trsform == 5: + mea["Transform Type"] = "numMapping" + mea["Mapping_table"] = val["mapping_table"] + else: + if trsform == 4: + mea["Transform Type"] = "ptct" + mea["Gain"] = val["gain"] + mea["Offset"] = val["offset"] + mea["Pt"] = val["pt"] + mea["Ct"] = val["ct"] + mea["TransDecimal"] = val["transDecimal"] + else: + if trsform == 3: + mea["Transform Type"] = "bit" + mea["startBit"] = val["startBit"] + mea["endBit"] = val["endBit"] + else: + if trsform == 2: + mea["Transform Type"] = "zoom" + mea["Gain"] = val["gain"] + mea["Offset"] = val["offset"] + mea["TransDecimal"] = val["transDecimal"] + else: + if trsform == 1: + mea["Transform Type"] = "gain" + mea["MaxValue"] = val["maxValue"] + mea["MinValue"] = val["minValue"] + mea["MaxScale"] = val["maxScaleValue"] + mea["MinScale"] = val["minScaleValue"] + mea["TransDecimal"] = val["transDecimal"] + else: + mea["Transform Type"] = "none" + if val["dataType"] == "BIT": + mea["bitMap"] = val["bitMap"] + if "msecSample" in val: + mea["msecSample"] = val["msecSample"] + if mea["UploadType"] != "never": + mea["storageLwTSDB"] = val["storageLwTSDB"] + if "arrayIndex" in val: + mea["ArrayIndex"] = val["arrayIndex"] + if "pollCycle" in val: + mea["pollCycle"] = val["pollCycle"] + f_csv.writerow(mea) + + self.build_response_data(csv_file, topic) + + def bacnet_object_type_num_to_letter(self, object_type): + if object_type == "0": + return "AI" + if object_type == "1": + return "AO" + if object_type == "2": + return "AV" + if object_type == "3": + return "BI" + if object_type == "4": + return "BO" + if object_type == "5": + return "BV" + if object_type == "13": + return "MSI" + if object_type == "14": + return "MSO" + if 
def bacnet_object_type_num_to_letter(self, object_type):
    """Map a BACnet object-type number (as string) to its letter code,
    e.g. "0" -> "AI". Logs and returns None for unknown input."""
    letter = {
        "0": "AI", "1": "AO", "2": "AV",
        "3": "BI", "4": "BO", "5": "BV",
        "13": "MSI", "14": "MSO", "19": "MSV",
        "12": "LOOP", "20": "TRENDLOG", "128": "PROPRIETARY_MIN",
    }.get(object_type)
    if letter is None:
        logger.warn("parse object_type num to letter fail")
    return letter

def bacnet_object_type_letter_to_num(self, object_type):
    """Inverse of bacnet_object_type_num_to_letter: "AI" -> "0".
    Logs and returns None for unknown input."""
    num = {
        "AI": "0", "AO": "1", "AV": "2",
        "BI": "3", "BO": "4", "BV": "5",
        "MSI": "13", "MSO": "14", "MSV": "19",
        "LOOP": "12", "TRENDLOG": "20", "PROPRIETARY_MIN": "128",
    }.get(object_type)
    if num is None:
        logger.warn("parse object_type letter to num fail")
    return num

def bacnet_object_property_num_to_letter(self, object_property):
    """Map a BACnet property number (as string) to its name,
    e.g. "85" -> "present-value". Logs and returns None for unknown input."""
    name = {
        "85": "present-value",
        "75": "object-identifier",
        "77": "object-name",
        "87": "priority-array",
        "104": "relinquish-default",
    }.get(object_property)
    if name is None:
        logger.warn("parse object_property num to letter fail")
    return name

def bacnet_object_property_letter_to_num(self, object_property):
    """Inverse of bacnet_object_property_num_to_letter: "present-value" -> "85".
    Logs and returns None for unknown input."""
    num = {
        "present-value": "85",
        "object-identifier": "75",
        "object-name": "77",
        "priority-array": "87",
        "relinquish-default": "104",
    }.get(object_property)
    if num is None:
        logger.warn("parse object_property letter to num fail")
    return num
'EnablePerOnchange', + 'OnchangePeriod', 'LwTSDBSize', + 'strategy', + 'storagePeriod', 'historyDataMode', 'historyDataPath'] + f_csv = csv.DictWriter(f, csv_headers, dialect="excel") + f_csv.writeheader() + for g in groups: + gg = dict() + gg["GroupName"] = g["name"] + gg["UploadInterval"] = g["uploadInterval"] + gg["EnablePerOnchange"] = g["enablePerOnchange"] + if gg["EnablePerOnchange"] == 1: + gg["OnchangePeriod"] = g["onchangePeriod"] + gg["LwTSDBSize"] = g["LwTSDBSize"] + gg["strategy"] = g["strategy"] + if gg["strategy"] == 2: + gg["storagePeriod"] = g["storagePeriod"] + gg["historyDataMode"] = g["historyDataMode"] + gg["historyDataPath"] = g["historyDataPath"] + f_csv.writerow(gg) + + self.build_response_data(csv_file, topic) + + def export_opcua_pubsub_group_csv(self, topic): + pubsub_group = list(self.schema.simple_config["device_supervisor"]["opcuaServer"]["pubsub_group"].values()) + csv_file = CONFIG_TMP_PATH + "/" + OPCUA_PUB_SUB_GROUP_CSV_FILE_NAME + with open(csv_file, "w+", encoding="utf-8-sig") as f: + csv_headers = [ + "GroupType", "GroupId"] + f_csv = csv.DictWriter(f, csv_headers, dialect="excel") + f_csv.writeheader() + for g in pubsub_group: + gg = dict() + gg["GroupType"] = g["group_type"] + gg["GroupId"] = g["group_id"] + f_csv.writerow(gg) + + self.build_response_data(csv_file, topic) + + def export_alarm_csv(self, topic): + alarms = list(self.schema.simple_config["device_supervisor"]["alarms"].values()) + csv_file = CONFIG_TMP_PATH + "/" + ALARM_CSV_FILE_NAME + with open(csv_file, "w+", encoding="utf-8-sig") as f: + csv_headers = [ + 'AlarmName', 'ControllerName', 'MeasuringPointName', + 'AlarmLevel', + 'Condition1', + 'Operand1', 'CombineMethod', + 'Condition2', + 'Operand2', 'AlarmContent', 'AlarmTag'] + f_csv = csv.DictWriter(f, csv_headers, dialect="excel") + f_csv.writeheader() + for a in alarms: + alm = dict() + alm["AlarmName"] = a["name"] + alm["ControllerName"] = a["ctrlName"] + alm["MeasuringPointName"] = a["measureName"] + 
alm["AlarmLevel"] = a["alarmLevel"] + alm["Condition1"] = a["cond1"]["op"] + alm["Operand1"] = a["cond1"]["value"] + alm["CombineMethod"] = a["condOp"] + alm["Condition2"] = a["cond2"]["op"] + alm["Operand2"] = a["cond2"]["value"] + alm["AlarmContent"] = a["content"] + alm["AlarmTag"] = a["alarmLable"] + f_csv.writerow(alm) + + self.build_response_data(csv_file, topic) + + def export_alarmLables_csv(self, topic): + schema_alalabs = self.schema.simple_config["device_supervisor"]["alarmLables"] + csv_file = CONFIG_TMP_PATH + "/" + ALARMLABLES_CSV_FILE_NAME + with open(csv_file, "w+", encoding="utf-8-sig") as f: + csv_headers = [ + "AlarmLableName"] + f_csv = csv.DictWriter(f, csv_headers, dialect="excel") + f_csv.writeheader() + for alalab in schema_alalabs: + al = dict() + al["AlarmLableName"] = alalab + f_csv.writerow(al) + + self.build_response_data(csv_file, topic) + + def export_cloud_csv(self, cld_name, topic): + csv_file = CONFIG_TMP_PATH + "/" + CLOUD_CSV_FILE_NAME + with open(csv_file, "w+", encoding="utf-8-sig") as f: + config_data = dict() + for cloud in list(self.schema.simple_config["device_supervisor"]["clouds"].values()): + if cloud["name"] == cld_name: + config_data["cloud"] = copy.deepcopy(cloud) + del config_data["cloud"]["name"] + if "_id" in config_data["cloud"]: + del config_data["cloud"]["_id"] + break + + if config_data["cloud"]["type"] not in [CloudTypeErlang, CloudTypeBaiYing]: + config_data["quickfaas"] = dict() + config_data["quickfaas"]["uploadFuncs"] = list() + for cloudName, items in self.schema.simple_config["device_supervisor"]["quickfaas"]["uploadFuncs"].items(): + if cloudName == cld_name: + for item in items: + pub = copy.deepcopy(item) + del pub["cloudName"] + config_data["quickfaas"]["uploadFuncs"].append(pub) + + break + + config_data["quickfaas"]["downloadFuncs"] = list() + for cloudName, items in self.schema.simple_config["device_supervisor"]["quickfaas"]["downloadFuncs"].items(): + if cloudName == cld_name: + for item in 
items: + sub = copy.deepcopy(item) + del sub["cloudName"] + config_data["quickfaas"]["downloadFuncs"].append(sub) + + break + + import rapidjson + rapidjson.dump(config_data, f, indent=1, ensure_ascii=False) + self.build_response_data(csv_file, topic) + + def export_cloud_measure_csv(self, cld_name, topic): + clouds = list(self.schema.simple_config["device_supervisor"]["clouds"].values()) + for cloud in clouds: + if cloud["name"] == cld_name: + if "uploadRules" in cloud: + uploadRules = list(cloud["uploadRules"]) + break + else: + uploadRules = list() + + csv_file = CONFIG_TMP_PATH + "/" + CLOUD_MEASURE_CSV_FILE_NAME + with open(csv_file, "w+", encoding="utf-8-sig") as f: + csv_headers = [ + 'MeasuringPointName', 'ControllerName', 'GroupName', + 'UploadName', 'Hide'] + f_csv = csv.DictWriter(f, csv_headers, dialect="excel") + f_csv.writeheader() + for upload_rule in uploadRules: + value = dict() + value["MeasuringPointName"] = upload_rule["measureName"] + value["ControllerName"] = upload_rule["ctrlName"] + mea = self.find_measure_by_name(upload_rule["ctrlName"], upload_rule["measureName"]) + value["GroupName"] = mea["group"] + value["UploadName"] = upload_rule["uploadName"] + value["Hide"] = upload_rule["hide"] + f_csv.writerow(value) + + self.build_response_data(csv_file, topic) + + def export_modbus_slave_csv(self, topic, mappingTableName, is_modbusRTU=False): + if is_modbusRTU: + mappingTableKey, mappingTable = self.find_modbusSlave_mappingTable_by_name(mappingTableName, True) + map_tables = list(mappingTable["measures"].values()) + else: + mappingTableKey, mappingTable = self.find_modbusSlave_mappingTable_by_name(mappingTableName, False) + map_tables = list(mappingTable["measures"].values()) + csv_file = CONFIG_TMP_PATH + "/" + SLAVE_CSV_FILE_NAME + with open(csv_file, "w+", encoding="utf-8-sig") as f: + csv_headers = [ + 'Measuring Point Name', 'Controller Name', + 'readWrite', + 'Data Type', + 'Mapping Data Type', 'EnableBit', 'Start Mapping Address'] + f_csv 
= csv.DictWriter(f, csv_headers, dialect="excel") + f_csv.writeheader() + for mt in map_tables: + map_measure = dict() + map_measure["Measuring Point Name"] = mt["measureName"] + map_measure["Controller Name"] = mt["ctrlName"] + map_measure["readWrite"] = mt["readWrite"] + map_measure["Data Type"] = mt["dataType"] + map_measure["Mapping Data Type"] = mt["northDataType"] + if map_measure["Data Type"] in [Byte, Sint, Word, Int, Dword, Dint, Bcd, Bcd32, Ulong, Long]: + map_measure["EnableBit"] = mt["enableBit"] + addr = str(mt["startMapAddr"]) + if addr[0] == "0": + addr = "0X" + addr[1[:None]] + else: + if addr[0] == "1": + addr = "1X" + addr[1[:None]] + else: + if addr[0] == "3": + addr = "3X" + addr[1[:None]] + else: + if addr[0] == "4": + addr = "4X" + addr[1[:None]] + map_measure["Start Mapping Address"] = addr + f_csv.writerow(map_measure) + + self.build_response_data(csv_file, topic) + + def export_iec104_csv(self, topic): + map_tables = list(self.schema.simple_config["device_supervisor"]["iec104Server"]["mapping_table"].values()) + csv_file = CONFIG_TMP_PATH + "/" + IEC104_CSV_FILE_NAME + with open(csv_file, "w+", encoding="utf-8-sig") as f: + csv_headers = [ + 'Measuring Point Name', 'Controller Name', + 'readWrite', 'Data Type', + 'Mapping Data Type', + 'EnableBit', 'ASDU', 'Type Id', 'startIOA'] + f_csv = csv.DictWriter(f, csv_headers, dialect="excel") + f_csv.writeheader() + for mt in map_tables: + map_measure = dict() + map_measure["Measuring Point Name"] = mt["measureName"] + map_measure["Controller Name"] = mt["ctrlName"] + map_measure["readWrite"] = mt["readWrite"] + map_measure["Data Type"] = mt["dataType"] + map_measure["Mapping Data Type"] = mt["northDataType"] + if map_measure["Data Type"] in [Byte, Sint, Word, Int, Dword, Dint, Bcd, Bcd32, Ulong, Long]: + map_measure["EnableBit"] = mt["enableBit"] + map_measure["ASDU"] = mt["asduAddr"] + typeId = 0 + typeId = mt["typeId"][1] + if typeId == 1: + typeId = "[" + str(typeId) + "] " + "M_SP_NA_1" + 
else: + if typeId == 30: + typeId = "[" + str(typeId) + "] " + "M_SP_TB_1" + else: + if typeId == 3: + typeId = "[" + str(typeId) + "] " + "M_DP_NA_1" + else: + if typeId == 31: + typeId = "[" + str(typeId) + "] " + "M_DP_TB_1" + else: + if typeId == 5: + typeId = "[" + str(typeId) + "] " + "M_ST_NA_1" + else: + if typeId == 32: + typeId = "[" + str(typeId) + "] " + "M_ST_TB_1" + else: + if typeId == 7: + typeId = "[" + str(typeId) + "] " + "M_BO_NA_1" + else: + if typeId == 33: + typeId = "[" + str(typeId) + "] " + "M_BO_TB_1" + else: + if typeId == 9: + typeId = "[" + str(typeId) + "] " + "M_ME_NA_1" + else: + if typeId == 21: + typeId = "[" + str(typeId) + "] " + "M_ME_ND_1" + else: + if typeId == 34: + typeId = "[" + str(typeId) + "] " + "M_ME_TD_1" + else: + if typeId == 11: + typeId = "[" + str(typeId) + "] " + "M_ME_NB_1" + else: + if typeId == 35: + typeId = "[" + str(typeId) + "] " + "M_ME_TE_1" + else: + if typeId == 13: + typeId = "[" + str(typeId) + "] " + "M_ME_NC_1" + else: + if typeId == 36: + typeId = "[" + str(typeId) + "] " + "M_ME_TF_1" + else: + if typeId == 15: + typeId = "[" + str(typeId) + "] " + "M_IT_NA_1" + else: + if typeId == 37: + typeId = "[" + str(typeId) + "] " + "M_IT_TB_1" + else: + if typeId == 110: + typeId = "[" + str(typeId) + "] " + "P_ME_NA_1" + else: + if typeId == 111: + typeId = "[" + str(typeId) + "] " + "P_ME_NB_1" + else: + if typeId == 112: + typeId = "[" + str(typeId) + "] " + "P_ME_NC_1" + else: + if typeId == 45: + typeId = "[" + str(typeId) + "] " + "C_SC_NA_1" + else: + if typeId == 58: + typeId = "[" + str(typeId) + "] " + "C_SC_TA_1" + else: + if typeId == 46: + typeId = "[" + str(typeId) + "] " + "C_DC_NA_1" + else: + if typeId == 59: + typeId = "[" + str(typeId) + "] " + "C_DC_TA_1" + else: + if typeId == 47: + typeId = "[" + str(typeId) + "] " + "C_RC_NA_1" + else: + if typeId == 60: + typeId = "[" + str(typeId) + "] " + "C_RC_TA_1" + else: + if typeId == 48: + typeId = "[" + str(typeId) + "] " + "C_SE_NA_1" 
+ else: + if typeId == 61: + typeId = "[" + str(typeId) + "] " + "C_SE_TA_1" + else: + if typeId == 49: + typeId = "[" + str(typeId) + "] " + "C_SE_NB_1" + else: + if typeId == 62: + typeId = "[" + str(typeId) + "] " + "C_SE_TB_1" + else: + if typeId == 50: + typeId = "[" + str(typeId) + "] " + "C_SE_NC_1" + else: + if typeId == 63: + typeId = "[" + str(typeId) + "] " + "C_SE_TC_1" + else: + if typeId == 51: + typeId = "[" + str(typeId) + "] " + "C_BO_NA_1" + else: + if typeId == 64: + typeId = "[" + str(typeId) + "] " + "C_BO_TA_1" + map_measure["Type Id"] = typeId + map_measure["startIOA"] = mt["startMapAddr"] + f_csv.writerow(map_measure) + + self.build_response_data(csv_file, topic) + + def export_sl651_csv(self, topic): + map_tables = list(self.schema.simple_config["device_supervisor"]["sl651Slave"]["mapping_table"].values()) + csv_file = CONFIG_TMP_PATH + "/" + SL651_CSV_FILE_NAME + with open(csv_file, "w+", encoding="utf-8-sig") as f: + csv_headers = [ + 'Measuring Point Name', 'Controller Name', + 'readWrite', 'Data Type', + 'Mapping Data Type', + 'EnableBit', 'Identifi'] + f_csv = csv.DictWriter(f, csv_headers, dialect="excel") + f_csv.writeheader() + for mt in map_tables: + map_measure = dict() + map_measure["Measuring Point Name"] = mt["measureName"] + map_measure["Controller Name"] = mt["ctrlName"] + map_measure["readWrite"] = mt["readWrite"] + map_measure["Data Type"] = mt["dataType"] + map_measure["Mapping Data Type"] = mt["northDataType"] + if map_measure["Data Type"] in [Byte, Sint, Word, Int, Dword, Dint, Bcd, Bcd32, Ulong, Long]: + map_measure["EnableBit"] = mt["enableBit"] + map_measure["Identifi"] = mt["identifi"] + f_csv.writerow(map_measure) + + self.build_response_data(csv_file, topic) + + def export_hj212_csv(self, topic): + map_tables = list(self.schema.simple_config["device_supervisor"]["hj212Client"]["mapping_table"].values()) + csv_file = CONFIG_TMP_PATH + "/" + HJ212_CSV_FILE_NAME + with open(csv_file, "w+", encoding="utf-8-sig") as f: 
+ csv_headers = [ + 'Measuring Name', 'Controller Name', 'readWrite', + 'Data Type', + 'Mapping Data Type', + 'EnableBit', 'Encode', 'Block'] + f_csv = csv.DictWriter(f, csv_headers, dialect="excel") + f_csv.writeheader() + for mt in map_tables: + map_measure = dict() + map_measure["Measuring Name"] = mt["measureName"] + map_measure["Controller Name"] = mt["ctrlName"] + map_measure["readWrite"] = mt["readWrite"] + map_measure["Data Type"] = mt["dataType"] + map_measure["Mapping Data Type"] = mt["northDataType"] + if map_measure["Data Type"] in [Byte, Sint, Word, Int, Dword, Dint, Bcd, Bcd32, Ulong, Long]: + map_measure["EnableBit"] = mt["enableBit"] + map_measure["Encode"] = mt["encode"] + map_measure["Block"] = mt["block"] + f_csv.writerow(map_measure) + + self.build_response_data(csv_file, topic) + + def export_bacnet_server_csv(self, topic): + map_tables = list(self.schema.simple_config["device_supervisor"]["bacnetServer"]["mapping_table"].values()) + csv_file = CONFIG_TMP_PATH + "/" + BACNET_CSV_FILE_NAME + with open(csv_file, "w+", encoding="utf-8-sig") as f: + csv_headers = [ + 'Control Name', 'Measuring Point Name', 'Read/Write', + 'Data Type', + 'Mapping Data Type', + 'Enable Bit', 'Object Type', 'Instance Number'] + f_csv = csv.DictWriter(f, csv_headers, dialect="excel") + f_csv.writeheader() + for mt in map_tables: + map_measure = dict() + map_measure["Control Name"] = mt["ctrlName"] + map_measure["Measuring Point Name"] = mt["measureName"] + map_measure["Read/Write"] = mt["readWrite"] + map_measure["Data Type"] = mt["dataType"] + map_measure["Mapping Data Type"] = mt["northDataType"] + if map_measure["Data Type"] in [Byte, Sint, Word, Int, Dword, Dint, Bcd, Bcd32, Ulong, Long]: + map_measure["Enable Bit"] = mt["enableBit"] + map_measure["Object Type"] = mt["objectType"] + map_measure["Instance Number"] = mt["objectInstance"] + f_csv.writerow(map_measure) + + self.build_response_data(csv_file, topic) + + def export_dnp3_server_csv(self, topic): + 
map_tables = list(self.schema.simple_config["device_supervisor"]["Dnp3Server"]["mapping_table"].values()) + csv_file = CONFIG_TMP_PATH + "/" + DNP3_CSV_FILE_NAME + with open(csv_file, "w+", encoding="utf-8-sig") as f: + csv_headers = [ + 'Measuring Name', 'Controller Name', 'readWrite', + 'Data Type', + 'Mapping Data Type', + 'EnableBit', 'Object Type', 'Instance Number'] + f_csv = csv.DictWriter(f, csv_headers, dialect="excel") + f_csv.writeheader() + for mt in map_tables: + map_measure = dict() + map_measure["Measuring Name"] = mt["measureName"] + map_measure["Controller Name"] = mt["ctrlName"] + map_measure["readWrite"] = mt["readWrite"] + map_measure["Data Type"] = mt["dataType"] + map_measure["Mapping Data Type"] = mt["northDataType"] + map_measure["Object Type"] = mt["addrType"] + map_measure["Instance Number"] = mt["addr"] + if map_measure["Data Type"] in [Byte, Sint, Word, Int, Dword, Dint, Bcd, Bcd32, Ulong, Long]: + map_measure["EnableBit"] = mt["enableBit"] + f_csv.writerow(map_measure) + + self.build_response_data(csv_file, topic) + + def export_iec61850_server_csv(self, topic): + map_tables = list(self.schema.simple_config["device_supervisor"]["iec61850Server"]["mapping_table"].values()) + csv_file = CONFIG_TMP_PATH + "/" + IEC61850_CSV_FILE_NAME + with open(csv_file, "w+", encoding="utf-8-sig") as f: + csv_headers = [ + 'Measuring Name', 'Controller Name', 'readWrite', + 'Data Type', 'Mapping Data Type', + 'EnableBit', + 'CDC Type', 'Data Attribute Reference', 'Add Data Set', + 'Data Set Name'] + f_csv = csv.DictWriter(f, csv_headers, dialect="excel") + f_csv.writeheader() + for mt in map_tables: + map_measure = dict() + map_measure["Measuring Name"] = mt["measureName"] + map_measure["Controller Name"] = mt["ctrlName"] + map_measure["readWrite"] = mt["readWrite"] + map_measure["Data Type"] = mt["dataType"] + map_measure["Mapping Data Type"] = mt["northDataType"] + map_measure["CDC Type"] = mt["cdcType"] + map_measure["Data Attribute Reference"] = 
mt["daRef"] + if map_measure["Data Type"] in [Byte, Sint, Word, Int, Dword, Dint, Bcd, Bcd32, Ulong, Long]: + map_measure["EnableBit"] = mt["enableBit"] + map_measure["Add Data Set"] = mt["addDataSet"] + if map_measure["Add Data Set"] == 1: + map_measure["Data Set Name"] = mt["dataSetName"] + f_csv.writerow(map_measure) + + self.build_response_data(csv_file, topic) + + def export_snmp_agent_csv(self, topic): + map_tables = list(self.schema.simple_config["device_supervisor"]["snmpAgent"]["mapping_table"].values()) + csv_file = CONFIG_TMP_PATH + "/" + SNMP_CSV_FILE_NAME + with open(csv_file, "w+", encoding="utf-8-sig") as f: + csv_headers = [ + 'Measuring Name', 'Controller Name', 'readWrite', + 'Data Type', + 'Mapping Data Type', + 'EnableBit', 'OID'] + f_csv = csv.DictWriter(f, csv_headers, dialect="excel") + f_csv.writeheader() + for mt in map_tables: + map_measure = dict() + map_measure["Measuring Name"] = mt["measureName"] + map_measure["Controller Name"] = mt["ctrlName"] + map_measure["readWrite"] = mt["readWrite"] + map_measure["Data Type"] = mt["dataType"] + map_measure["Mapping Data Type"] = mt["northDataType"] + map_measure["OID"] = mt["oid"] + if map_measure["Data Type"] in [Byte, Sint, Word, Int, Dword, Dint, Bcd, Bcd32, Ulong, Long]: + map_measure["EnableBit"] = mt["enableBit"] + f_csv.writerow(map_measure) + + self.build_response_data(csv_file, topic) + + def export_iec101Slave_csv(self, topic, type): + map_tables = list(self.schema.simple_config["device_supervisor"]["iec101Server"]["mappingTable"][type].values()) + csv_file = CONFIG_TMP_PATH + "/" + IEC101_CSV_FILE_NAME + with open(csv_file, "w+", encoding="utf-8-sig") as f: + csv_headers = [ + 'Measuring Point Name', 'Controller Name', + 'readWrite', 'Data Type', + 'Mapping Data Type', + 'EnableBit', 'ASDU', 'Type Id', 'startIOA'] + f_csv = csv.DictWriter(f, csv_headers, dialect="excel") + f_csv.writeheader() + for mt in map_tables: + map_measure = dict() + map_measure["Measuring Point Name"] = 
mt["measureName"] + map_measure["Controller Name"] = mt["ctrlName"] + map_measure["readWrite"] = mt["readWrite"] + map_measure["Data Type"] = mt["dataType"] + map_measure["Mapping Data Type"] = mt["northDataType"] + if map_measure["Data Type"] in [Byte, Sint, Word, Int, Dword, Dint, Bcd, Bcd32, Ulong, Long]: + map_measure["EnableBit"] = mt["enableBit"] + map_measure["ASDU"] = mt["asduAddr"] + typeId = 0 + typeId = mt["typeId"][1] + if typeId == 1: + typeId = "[" + str(typeId) + "] " + "M_SP_NA_1" + else: + if typeId == 13: + typeId = "[" + str(typeId) + "] " + "M_ME_NC_1" + else: + if typeId == 45: + typeId = "[" + str(typeId) + "] " + "C_SC_NA_1" + map_measure["Type Id"] = typeId + map_measure["startIOA"] = mt["startMapAddr"] + f_csv.writerow(map_measure) + + self.build_response_data(csv_file, topic) + + def export_iec104Client_csv(self, topic, type): + map_tables = list(self.schema.simple_config["device_supervisor"]["iec104Client"]["mapping_table"][type].values()) + csv_file = CONFIG_TMP_PATH + "/" + IEC104_CSV_FILE_NAME + with open(csv_file, "w+", encoding="utf-8-sig") as f: + csv_headers = [ + 'Measuring Point Name', 'Controller Name', + 'Data Type', 'EnableBit', 'Read Write'] + f_csv = csv.DictWriter(f, csv_headers, dialect="excel") + f_csv.writeheader() + for mt in map_tables: + map_measure = dict() + map_measure["Measuring Point Name"] = mt["measureName"] + map_measure["Controller Name"] = mt["ctrlName"] + map_measure["Data Type"] = mt["dataType"] + if map_measure["Data Type"] in [Byte, Sint, Word, Int, Dword, Dint, Bcd, Bcd32, Ulong, Long]: + map_measure["EnableBit"] = mt["enableBit"] + map_measure["Read Write"] = mt["readWrite"] + f_csv.writerow(map_measure) + + self.build_response_data(csv_file, topic) + + def export_labels_csv(self, topic): + label_tables = list(self.schema.simple_config["device_supervisor"]["labels"].values()) + csv_file = CONFIG_TMP_PATH + "/" + LABELS_CSV_FILE_NAME + with open(csv_file, "w+", encoding="utf-8-sig") as f: + csv_headers 
= [ + "key", "value"] + f_csv = csv.DictWriter(f, csv_headers, dialect="excel") + f_csv.writeheader() + for mt in label_tables[2[:None]]: + labels = dict() + labels["key"] = mt["key"] + labels["value"] = mt["value"] + f_csv.writerow(labels) + + self.build_response_data(csv_file, topic) + + def export_genericfuncs_csv(self, topic): + schema_genFuns = self.schema.simple_config["device_supervisor"]["quickfaas"]["genericFuncs"] + csv_file = CONFIG_TMP_PATH + "/" + GENFUNS_CSV_FILE_NAME + with open(csv_file, "w+", encoding="utf-8-sig") as f: + config_data = dict() + config_data["genericFuncs"] = list() + for gf in schema_genFuns: + genfuns = copy.deepcopy(gf) + config_data["genericFuncs"].append(genfuns) + + import rapidjson + rapidjson.dump(config_data, f, indent=1, ensure_ascii=False) + self.build_response_data(csv_file, topic) + + def _merge_debuglog_files(self, file1, file2, output_file): + try: + with open(output_file, "w") as output: + if os.path.exists(file1): + shutil.copyfileobj(open(file1, "r"), output) + if os.path.exists(file2): + shutil.copyfileobj(open(file2, "r"), output) + except Exception as error: + try: + logger.warn("Merge debugLog file failed(%s)" % error) + finally: + error = None + del error + + def export_pkgdbg_log(self, ctrlName, topic): + misc = self.schema.simple_config["device_supervisor"]["misc"] + from master.Services import DriverService + for service in self.master.service_manager.services: + if isinstance(service, DriverService): + if ctrlName in service.controller_names: + if service.driverkey == "DriverAgent": + debug_log_file = "%s/%s.log" % (misc["debugLogPath"], ctrlName) + else: + controller = self.find_ctl_name(ctrlName) + if controller is False: + logger.warn("controller(%s) not found" % ctrlName) + break + debug_log_file = "%s/%s_%s.log" % (misc["debugLogPath"], controller["protocol"], service.service_id) + self._merge_debuglog_files(debug_log_file + ".1", debug_log_file, debug_log_file + ".export") + 
self.build_response_data(debug_log_file + ".export", topic) + break + + def export_global_config(self, topic): + global_config_path = "/tmp/files/export/device_supervisor.cfg" + cmd = "mkdir -p /tmp/files/export; cp %s %s" % (self.cfg.filename, global_config_path) + os.system(cmd) + self.build_response_data(global_config_path, topic) + + def export_log(self, topic, request_id): + export_file = "%s/log/device_supervisor-%s.tar.gz" % (self.cfg.app_base_path, request_id) + self.build_response_data(export_file, topic) + + def cloud_status(self, topic, payload): + status, timestamp = (0, 0) + try: + payload = Utilities.payload_transfer(payload) + for _, cloud in self.schema.simple_config["device_supervisor"]["clouds"].items(): + if cloud["type"] in [CloudTypeErlang, CloudTypeBaiYing]: + file_path = self.cfg.app_run_base_path + "/ds2/erlang_status.json" + else: + file_path = self.cfg.app_run_base_path + "/ds2/" + cloud["name"] + "_status.json" + if cloud["name"] == payload["name"]: + if cloud["enable"]: + if os.path.exists(file_path): + with open(file_path, "r", encoding="utf-8") as f: + cont = json.load(f) + status = int(cont["status"]) + if status == 2: + import psutil + timestamp = int(time.time()) - int(psutil.boot_time()) - int(cont["timestamp"]) + break + + except Exception as error: + try: + logger.warn("Load cloud status failed(%s)" % error) + finally: + error = None + del error + + self.build_response_data(self.wrap_response_data({'status':status, 'connTime':timestamp}), topic) + + def iec104client_status(self, topic, payload): + status, timestamp = (0, 0) + try: + payload = Utilities.payload_transfer(payload) + iec104Client_enable = self.schema.simple_config["device_supervisor"]["iec104Client"]["config"]["enable"] + file_path = self.cfg.app_run_base_path + "/ds2/iec104Client_status.json" + if iec104Client_enable: + if os.path.exists(file_path): + with open(file_path, "r", encoding="utf-8") as f: + cont = json.load(f) + status = int(cont["status"]) + if status 
== 2: + import psutil + timestamp = int(time.time()) - int(psutil.boot_time()) - int(cont["timestamp"]) + except Exception as error: + try: + logger.warn("Load iec104client status failed(%s)" % error) + finally: + error = None + del error + + self.build_response_data(self.wrap_response_data({'status':status, 'connTime':timestamp}), topic) + + def north_basic_status(self, topic, payload): + try: + payload_trans = Utilities.payload_transfer(payload) + file_path = self.cfg.app_run_base_path + "/ds2/" + payload_trans["service"] + "_status.json" + if os.path.exists(file_path): + with open(file_path, "r", encoding="utf-8") as f: + cont = json.load(f) + result = cont + import psutil + start_time = result["service_status"]["runtime"] + if result["service_status"]["status"] != 0: + result["service_status"]["runtime"] = int(time.time()) - int(psutil.boot_time()) - int(start_time) + else: + result["service_status"]["runtime"] = 0 + for link in result["link_status"]: + start_time = link["linktime"] + if link["status"] != 0: + link["linktime"] = int(time.time()) - int(psutil.boot_time()) - int(start_time) + else: + link["linktime"] = 0 + + else: + result = {'service_status':{'status':0, + 'runtime':0}, + 'link_status':[]} + except Exception as error: + try: + result = {'service_status':{'status':0, + 'runtime':0}, + 'link_status':[]} + logger.warn("Load North Basic Status failed(%s)" % error) + finally: + error = None + del error + + self.build_response_data(self.wrap_response_data(result), topic) + + def build_response_data(self, data, topic): + try: + topic = topic.replace(WEB_REQUEST_PREFIX, WEB_RESPONSE_PREFIX) + if not isinstance(data, (str, bytes)): + data = json.dumps(data) + self.mqchannel.publish(topic, data, qos=(MqttSetting.MQTT_QOS_LEVEL)) + except Exception as e: + try: + logger.error("build response error : %s, data: %s, topic: %s" % (e, data, topic)) + finally: + e = None + del e + + def wrap_response_data(self, results): + """ + :param results: "ok"/ "failed"/ 
dict data + :return: + """ + data = {"result": results} + return data + + def pub_invalid_response_by_params(self, topic, error_code, has_params=None, params=''): + if has_params: + data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[error_code])) + data["params"] = params + else: + data = dict(zip(self.ERROR_KEYS, self.ERROR_VALUES_DICT[error_code])) + self.build_response_data(data, topic) + + def verify_kv_and_get_index(self, list_item, key, value, fk='', fv=''): + """ + + :param list_item: + :param key: + :param value: + :param fk: filter ker + :param fv: filter value + :return: + """ + ind = None + for it in list_item: + if value != it[key]: + continue + if fk and fv: + if it[fk] == fv: + ind = list_item.index(it) + break + else: + ind = list_item.index(it) + break + + return ind + + def verify_isNumLeters(self, inp='', check_id=''): + if check_id == "key": + alphas = string.ascii_letters + "_" + nums = string.digits + if len(inp) >= 0: + if inp[0] not in alphas: + return 1 + for otherChar in inp[1[:None]]: + if otherChar not in alphas + nums: + return 2 + else: + return 0 + elif re.match("^[A-Za-z0-9 _-]*$", inp[0[:None]]): + return 0 + return 1 + + def transfer_measure_csv_headers(self, ctr_name, val): + """ + transfer to data that similar as web post + :param val: + :return: + """ + mea = dict() + if "_Id" in val: + mea["_id"] = val["_Id"] + mea["name"] = val["MeasuringPointName"] + mea["ctrlName"] = ctr_name + mea["group"] = val["GroupName"] + mea["uploadType"] = val["UploadType"] + mea["dataType"] = val["DataType"] + mea["addr"] = val["Address"] + if val["DataType"] in [Float, Double]: + mea["decimal"] = int(str(val["Decimal"]).strip()) + if val["DataType"] == String: + mea["len"] = int(str(val["Len"]).strip()) + mea["codeType"] = val["CodeType"] + if val["DataType"] == Bit: + mea["bitMap"] = int(val["bitMap"]) + mea["reverseBit"] = int(val["reverseBit"]) + if val["DataType"] in [Byte, Sint, Word, Int, Dword, Dint, Bcd, Bcd32, 
Ulong, Long]: + mea["enableBit"] = int(val["EnableBit"]) + if int(val["EnableBit"]) == 1: + mea["bitIndex"] = int(val["BitIndex"]) + mea["bitMap"] = int(val["bitMap"]) + mea["reverseBit"] = int(val["reverseBit"]) + mea["readWrite"] = val["ReadWrite"] + mea["unit"] = val["Unit"] + mea["desc"] = val["Description"] + if mea["uploadType"] != "never": + mea["storageLwTSDB"] = int(val["storageLwTSDB"]) + elif mea["uploadType"] == OnChange: + if "enableBit" in mea: + if mea["enableBit"] == 0: + mea["deadZonePercent"] = val["DeadZonePercent"] + elif mea["dataType"] in [Float, Double]: + mea["deadZonePercent"] = val["DeadZonePercent"] + else: + trsform = val["Transform Type"] + if trsform == "none": + mea["transformType"] = 0 + else: + if trsform == "gain": + mea["transformType"] = 1 + mea["maxValue"] = val["MaxValue"] + mea["minValue"] = val["MinValue"] + mea["maxScaleValue"] = val["MaxScale"] + mea["minScaleValue"] = val["MinScale"] + mea["transDecimal"] = int(val["TransDecimal"]) + else: + if trsform == "zoom": + mea["transformType"] = 2 + mea["gain"] = val["Gain"] + mea["offset"] = val["Offset"] + mea["transDecimal"] = int(val["TransDecimal"]) + else: + if trsform == "bit": + mea["transformType"] = 3 + mea["startBit"] = int(val["startBit"]) + mea["endBit"] = int(val["endBit"]) + else: + if trsform == "ptct": + mea["transformType"] = 4 + mea["gain"] = val["Gain"] + mea["offset"] = val["Offset"] + mea["pt"] = val["Pt"] + mea["ct"] = val["Ct"] + mea["transDecimal"] = int(val["TransDecimal"]) + else: + if trsform == "numMapping": + mea["transformType"] = 5 + mea["mapping_table"] = eval(val["Mapping_table"]) + else: + raise ValueError("Unknown transformType %s" % trsform) + ctr = self.find_ctl_name(ctr_name) + if "enablepollCycle" in ctr: + if ctr["enablepollCycle"] == 1: + mea["pollCycle"] = int(val["pollCycle"]) if "pollCycle" in val else 0 + protocol = ctr["protocol"] + if protocol in [MbRtu, MbTcp, MbAscii, MbRtuOverTcp, MbRtuSlave]: + mea["addr"] = 
val["Address"].replace("X", "") + mea["msecSample"] = int(val["msecSample"]) if "msecSample" in val else 0 + else: + if protocol in [OpcUa, Snap7, OpcDa, OpcUa_PubSub]: + mea["msecSample"] = int(val["msecSample"]) if "msecSample" in val else 0 + if protocol in [OpcUa]: + mea["arrayIndex"] = int(val["ArrayIndex"]) if "ArrayIndex" in val else -1 + if protocol in [OpcUa_PubSub]: + mea["arrayIndex"] = int(val["ArrayIndex"]) if "ArrayIndex" in val else -1 + if "," in val["Address"]: + mea["sub_group_writeID"] = val["Address"].split(",")[0] + mea["pub_group_writeID"] = val["Address"].split(",")[1] + else: + mea["sub_group_writeID"] = val["Address"] + else: + if protocol in [Easycom, EasyEthernet]: + mea["data_endian_reverse"] = int(val["DataEndianReverse"]) + if mea["readWrite"] in ('rw', 'ro'): + mea["rd_offset"] = int(val["ReadOffset"]) + mea["rd_length"] = int(val["ReadLength"]) + if mea["readWrite"] in ('rw', 'wo'): + mea["wr_offset"] = int(val["WriteOffset"]) + mea["wr_length"] = int(val["WriteLength"]) + mea["data_parse_method"] = val["DataParseMethod"] + if val["DataType"] == Bit: + mea["bit_id"] = int(val["BitId"]) + elif protocol in [BacBip, BacMstp]: + addr_list = val["Address"].split(",") + object_type = self.bacnet_object_type_letter_to_num(addr_list[0]) + mea["addr"] = val["Address"].replace(addr_list[0], object_type) + else: + if protocol in [Eip, Snap7, SIPPI, MC1C, MC1C, MC3C, MC4C, MC1E, MC3E, + MCPS, + MC3COT, VRCON, OMFT, OMFU, OMHLS, + EipPCCC, DLT645_2007, + DLT645_1997, + PANMEW, TANCYV13, IEC101, IEC103, + IEC104, TATEKPROGRAM, TATEKPROGRAM_OverTcp, + SIFW, + SIPPI_OverTcp, SI_WebApi, MCR, MC3E_UDP, + MCFS_OverTcp, MCFL, MCFL_OverTcp, BacBip, + OMHL_OverTcp, + OMHLC, OMHLC_OverTcp, OMCN, OMCCN, + ABCC, ABMC, ABSLC, KEMC, KENano, + BacMstp, + KENano_OverTcp, + PANMC, PANMEW_OverTcp, BFADS, DTSerial, + DTAscii, DTTcp, DTSerial_OverTcp, + XJSerial, + XJSerial_OverTcp, XJTcpNet, XJIN, VGSerial, + VGSerial_OverTcp, FJSPB, FJSPB_OverTcp, + FJSPHNet, + 
GESRPT, YKGWLT, DLT645_OverTcp, DLT698, + IVSerial, IV_OverTcp, IV_TcpNet, CNCFS, + Euromap63, + DNP3Tcp, DNP3Udp, DNP3RTU, RobotEfort, + RobotAbb, RobotFanuc, Iec61850_MMS, + KeBaSocket, + CJT188_2004, Hj212_Serial, Hj2122005_Serial]: + pass + else: + raise KeyError("Unknown ctl protocol %s" % protocol) + return mea + + def computing_end_addr(self, ctl_name, mea_name, funcode, start_addr, data_type, enableBit, line, topic): + end_addr = "" + ctls = self.ucfg.controllers + ind = self.verify_kv_and_get_index(ctls, "name", ctl_name) + if ind is None: + logger.warn("import modbus slave csv error: Invalid Controller Name: %s" % ctl_name) + return self.response_error_params(line, "Controller Name: " + ctl_name, topic, -11008) + else: + ctl = ctls[ind] + protocol = ctl["protocol"] + measures = self.ucfg.measures + ind = self.verify_kv_and_get_index(measures, "name", mea_name, fk="ctrlName", fv=ctl_name) + if ind is None: + logger.warn("import modbus slave csv error: Invalid Measuring Point Name: %s" % mea_name) + return self.response_error_params(line, "Measuring Point Name: " + mea_name, topic, -11008) + else: + mea = measures[ind] + if protocol in [MbRtu, MbTcp, MbAscii, MbRtuOverTcp, OpcUa, OpcDa, Snap7, Eip, + MC1C, MC3C, MC4C, MC1E, MC3E, + MCPS, + MC3COT, VRCON, OMFT, OMFU, OMHLS, SIPPI, + EipPCCC, DLT645_2007, DLT645_1997, PANMEW, + TANCYV13, + IEC101, IEC103, IEC104, TATEKPROGRAM, TATEKPROGRAM_OverTcp, + Easycom, EasyEthernet, + SIFW, + SIPPI_OverTcp, SI_WebApi, MCR, MC3E_UDP, MCFS_OverTcp, + MCFL, MCFL_OverTcp, BacBip, + OMHL_OverTcp, + OMHLC, OMHLC_OverTcp, OMCN, OMCCN, ABCC, + ABMC, ABSLC, KEMC, KENano, BacMstp, + KENano_OverTcp, + PANMC, PANMEW_OverTcp, BFADS, DTSerial, DTAscii, + DTTcp, DTSerial_OverTcp, + XJSerial, + XJSerial_OverTcp, XJTcpNet, XJIN, VGSerial, + VGSerial_OverTcp, FJSPB, FJSPB_OverTcp, + FJSPHNet, + GESRPT, YKGWLT, DLT645_OverTcp, DLT698, IVSerial, + IV_OverTcp, IV_TcpNet, CNCFS, + Euromap63, + DNP3Tcp, DNP3Udp, DNP3RTU, RobotEfort, 
RobotAbb, + RobotFanuc, Iec61850_MMS, + KeBaSocket, + CJT188_2004, MbRtuSlave, Hj212_Serial, Hj2122005_Serial, + OpcUa_PubSub]: + if funcode in ('0', '1'): + if data_type == Bool: + end_addr = start_addr + elif data_type == Bit: + end_addr = start_addr + else: + if data_type in [Byte, Sint, Word, Int, Dword, Dint, Bcd, Bcd32, Ulong, + Long]: + if enableBit == 1: + end_addr = start_addr + else: + if data_type == Byte: + end_addr = str(int(start_addr) + 7) + else: + if data_type == Sint: + end_addr = str(int(start_addr) + 7) + else: + if data_type == Word: + end_addr = str(int(start_addr) + 15) + else: + if data_type == Int: + end_addr = str(int(start_addr) + 15) + else: + if data_type == Dword: + end_addr = str(int(start_addr) + 31) + else: + if data_type == Bcd32: + end_addr = str(int(start_addr) + 31) + else: + if data_type == Dint: + end_addr = str(int(start_addr) + 31) + else: + if data_type == Bcd: + end_addr = str(int(start_addr) + 7) + else: + if data_type == Ulong: + end_addr = str(int(start_addr) + 63) + else: + if data_type == Long: + end_addr = str(int(start_addr) + 63) + else: + if funcode in ('3', + '4'): + if data_type == Byte: + end_addr = start_addr + elif data_type == Sint: + end_addr = start_addr + else: + if data_type == Word: + end_addr = start_addr + else: + if data_type == Int: + end_addr = start_addr + else: + if data_type == Dword: + end_addr = str(int(start_addr) + 1) + else: + if data_type == Bcd32: + end_addr = str(int(start_addr) + 1) + else: + if data_type == Dint: + end_addr = str(int(start_addr) + 1) + else: + if data_type == Float: + end_addr = str(int(start_addr) + 1) + else: + if data_type == Double: + end_addr = str(int(start_addr) + 3) + else: + if data_type == String: + r_len = int(mea["len"]) + byte_len = (r_len - 1 if r_len % 2 == 0 else r_len) // 2 + logger.info(byte_len) + end_addr = str(int(start_addr) + byte_len) + else: + if data_type == Bcd: + end_addr = start_addr + else: + if data_type == String: + end_addr = start_addr 
+ else: + if data_type == Ulong: + end_addr = str(int(start_addr) + 3) + else: + if data_type == Long: + end_addr = str(int(start_addr) + 3) + else: + logger.warn("import modbus slave csv error: invalid Data Type: %s" % mea_name) + return self.response_error_params(line, "Data Type: " + data_type, topic, -11008) + return end_addr + + def verify_alarm_post(self, val, is_put=False): + for has_v in ('name', 'ctrlName', 'measureName'): + if not val[has_v]: + raise ValueError("Invalid %s" % has_v) + + if val["alarmLevel"] not in (1, 2, 3, 4, 5): + return "AlarmLevel: " + str(val["alarmLevel"]) + elif not isinstance(val["content"], str): + return "AlarmContent: Invalid content type" + ops = [AlarmEqual, AlarmNotEqual, AlarmGreater, AlarmLess, AlarmGreaterEqual, + AlarmLessEqual] + if val["condOp"] not in [AlarmAnd, AlarmOr, AlarmNone]: + return "CombineMethod: " + val["condOp"] + if val["cond1"]["op"] not in ops: + return "Condition1: " + val["cond1"]["op"] + value1 = val["cond1"]["value"] + return value1 and self.is_number(value1) or "Operand1: " + str(val["cond1"]["value"]) + if value1: + if "." not in value1: + val["cond1"]["value"] = value1 + ".0" + if val["condOp"] != AlarmNone: + if val["cond2"]["op"] not in ops: + return "Condition2: " + val["cond2"]["op"] + else: + value2 = val["cond2"]["value"] + return value2 and self.is_number(value2) or "Operand2: " + str(val["cond2"]["value"]) + if value2 and "." 
not in value2: + val["cond2"]["value"] = value2 + ".0" + if val["condOp"] == AlarmAnd: + if val["cond1"]["op"] == AlarmLess and val["cond2"]["op"] == AlarmGreater: + if float(value1) <= float(value2): + return "Operand1: " + str(value1) + "<=" + "Operand2: " + str(value2) + elif val["cond1"]["op"] == AlarmLessEqual and val["cond2"]["op"] == AlarmGreaterEqual: + if float(value1) < float(value2): + return "Operand1: " + str(value1) + "<" + "Operand2: " + str(value2) + elif val["cond1"]["op"] == AlarmGreater and val["cond2"]["op"] == AlarmLess: + if float(value1) >= float(value2): + return "Operand1: " + str(value1) + ">=" + "Operand2: " + str(value2) + elif val["cond1"]["op"] == AlarmGreaterEqual: + if val["cond2"]["op"] == AlarmLessEqual: + if float(value1) > float(value2): + return "Operand1: " + str(value1) + ">" + "Operand2: " + str(value2) + if val["alarmLable"] not in self.schema.simple_config["device_supervisor"]["alarmLables"]: + return "AlarmTag: " + val["alarmLable"] + return NONE + + def find_ctl_name(self, name): + schema_cons = self.schema.simple_config["device_supervisor"]["controllers"] + controllers = list(schema_cons.values()) + for ctl in controllers: + if ctl["name"] == name: + return ctl + + return False + + def find_measure_by_name(self, ctrlName, measureName): + schema_meas = self.schema.simple_config["device_supervisor"]["measures"][ctrlName] + measures = list(schema_meas.values()) + for mea in measures: + if mea["ctrlName"] == ctrlName and mea["name"] == measureName: + return mea + + return False + + def find_modbusSlave_mappingTable_by_name(self, mappingTableName, isRTUSlave): + if isRTUSlave: + schema_mappingTables = self.schema.simple_config["device_supervisor"]["modbusRTUSlave"]["mappingTable"] + else: + schema_mappingTables = self.schema.simple_config["device_supervisor"]["modbusSlave"]["mappingTable"] + for mappingTableKey, mappingTable in list(schema_mappingTables.items()): + if mappingTable["name"] == mappingTableName: + return ( + 
mappingTableKey, mappingTable) + + return (None, None) + + def is_number(self, s): + if s == "": + return False + if s.count(".") == 1: + if s[0] == "-": + s = s[1[:None]] + if s[0] == ".": + return False + s = s.replace(".", "") + for i in s: + if i not in "0123456789": + return False + else: + return True + + else: + if s.count(".") == 0: + if s[0] == "-": + s = s[1[:None]] + for i in s: + if i not in "0123456789": + return False + else: + return True + + else: + return False + + def is_number_scientific_counting(self, s): + try: + float(s) + return True + except ValueError: + pass + + try: + import unicodedata + unicodedata.numeric(s) + return True + except (TypeError, ValueError): + pass + + return False + + def verify_modbus_import_params(self, val, funcode): + ctl_name = val["ctrlName"] + ind = self.verify_kv_and_get_index(self.ucfg.controllers, "name", ctl_name) + if ind is None: + return "ctrlName: " + val["ctrlName"] + mea_name = val["measureName"] + ind = self.verify_kv_and_get_index((self.ucfg.measures), "name", mea_name, fk="ctrlName", fv=ctl_name) + if ind is None: + return "measureName: " + val["measureName"] + if val["readWrite"] not in ('ro', 'wo', 'rw'): + return "readWrite: " + val["readWrite"] + mea = self.ucfg.measures[ind] + if val["readWrite"] != mea["readWrite"]: + return "readWrite: " + val["readWrite"] + dt = val["dataType"] + northdt = val["dataType"] + mb_data_types = [Bool, Bit, Byte, Sint, Word, Int, Dword, Dint, Float, String, + Bcd, Bcd32, Ulong, Long, Double] + if dt not in mb_data_types: + return "dataType: " + dt + if northdt not in mb_data_types: + return "northDataType: " + northdt + start_addr = val["startMapAddr"] + if funcode in ('0', '1'): + if dt in [String, Float, Double]: + return "startMapAddr is: " + start_addr + "but dataType is: " + dt + if northdt in [String, Float, Double]: + return "startMapAddr is: " + start_addr + "but MappingDataType is: " + northdt + elif funcode in ('3', '4'): + if dt in [Bool, Bit]: + return 
"startMapAddr is: " + start_addr + "but dataType is: " + dt + if northdt in [Bool, Bit]: + return "startMapAddr is: " + start_addr + "but MappingDataType is: " + northdt + else: + return "startMapAddr: " + start_addr + if int(start_addr) < 1 or int(start_addr) > 65536: + return "startMapAddr: " + start_addr + return NONE + + def write_content_to_cert(self, content, cert_file, binary=False): + path, _ = os.path.split(cert_file) + if not os.path.exists(path): + os.makedirs(path) + elif binary: + with open(cert_file, "wb") as f: + f.write(content) + else: + with open(cert_file, "w", encoding="utf-8") as f: + f.write(content) + + def update_opcuaserver_cert_by_content(self, opcuaServer): + if "certificate_content" in opcuaServer: + if len(opcuaServer["certificate_content"]): + if opcuaServer["certificate"] == "": + opcuaServer["certificate"] = "/var/user/cfg/device_supervisor/opcua/opcua_server_cert.der" + self.write_content_to_cert(base64.b64decode(opcuaServer["certificate_content"]), opcuaServer["certificate"], True) + elif "privateKey_content" in opcuaServer and len(opcuaServer["privateKey_content"]): + if opcuaServer["privateKey"] == "": + opcuaServer["privateKey"] = "/var/user/cfg/device_supervisor/opcua/opcua_server_key.der" + self.write_content_to_cert(base64.b64decode(opcuaServer["privateKey_content"]), opcuaServer["privateKey"], True) + + def update_opcuaDriver_cert_by_content(self, opcuaDriver): + if "certificate_content" in opcuaDriver["args"]: + if len(opcuaDriver["args"]["certificate_content"]): + if opcuaDriver["args"]["certificate"] == "": + opcuaDriver["args"]["certificate"] = "/var/user/cfg/device_supervisor/opcuaDriver/opcua_server_cert.der" + self.write_content_to_cert(base64.b64decode(opcuaDriver["args"]["certificate_content"]), opcuaDriver["args"]["certificate"], True) + elif "privateKey_content" in opcuaDriver["args"] and len(opcuaDriver["args"]["privateKey_content"]): + if opcuaDriver["args"]["privateKey"] == "": + 
opcuaDriver["args"]["privateKey"] = "/var/user/cfg/device_supervisor/opcuaDriver/opcua_server_key.der" + self.write_content_to_cert(base64.b64decode(opcuaDriver["args"]["privateKey_content"]), opcuaDriver["args"]["privateKey"], True) + + def update_cert_by_contentParse error at or near `COME_FROM' instruction at offset 1058_0 + + def _check_import_cloud_name(self, name): + for _, cloud in self.schema.simple_config["device_supervisor"]["clouds"].items(): + if cloud["name"] == name: + return True + + return False + + def _verify_cld_putParse error at or near `COME_FROM' instruction at offset 1528_0 + + def is_valid_domain(self, value): + pattern = re.compile("^(([a-zA-Z]{1})|([a-zA-Z]{1}[a-zA-Z]{1})|([a-zA-Z]{1}[0-9]{1})|([0-9]{1}[a-zA-Z]{1})|([a-zA-Z0-9][-_.a-zA-Z0-9]{0,61}[a-zA-Z0-9]))\\.([a-zA-Z]{2,13}|[a-zA-Z0-9-]{2,30}.[a-zA-Z]{2,3})$") + if pattern.match(value): + return True + return False + + def verify_pub_post(self, cloud_type, val, is_put=False): + if not val["name"]: + data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11019])) + data["params"] = "NULL name" + return data + if cloud_type == "Aliyun IoT": + if val["msgType"] not in (0, 1, 2): + data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11019])) + data["params"] = "pub msgType: " + str(val["msgType"]) + return data + if val["msgType"] == 0 and val["qos"] not in (0, 1, 2): + data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11019])) + data["params"] = "pub qos: " + str(val["qos"]) + return data + elif val["qos"] not in (0, 1, 2): + data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11019])) + data["params"] = "pub qos: " + str(val["qos"]) + return data + else: + if val["trigger"] == "measure_event": + for group in val["groups"]: + ind = self.verify_kv_and_get_index(list(self.schema.simple_config["device_supervisor"]["groups"].values()), "name", group) + if ind is None: + data = 
dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11019])) + data["params"] = "pub group: " + group + return data + + else: + if val["trigger"] == "warning_event": + for alarm in val["alarms"]: + if alarm not in self.schema.simple_config["device_supervisor"]["alarmLables"]: + data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11026])) + data["params"] = "pub alarms: " + alarm + return data + + else: + data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11019])) + data["params"] = "pub trigger: " + val["trigger"] + return data + if not val["topic"]: + data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11019])) + data["params"] = "pub topic: " + val["topic"] + return data + else: + try: + self.verify_script_is_ok(val) + except Exception as e: + try: + logger.warn("Invalid script %s" % e) + data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11012])) + data["params"] = val["script"] if "script" in val else "" + return data + finally: + e = None + del e + + def verify_script_is_ok(self, pub_sub): + """ + return True if is valid + :param pub_sub: + :return: + """ + if not re.search("def( +)%s\\(" % pub_sub["funcName"], pub_sub["script"]): + raise KeyError("No main entry method found. 
%s" % pub_sub["funcName"]) + mymodule = types.ModuleType("mymodule") + exec(pub_sub["script"], mymodule.__dict__) + eval("mymodule.%s" % pub_sub["funcName"]) + + def verify_sub_post(self, cloud_type, val, is_put=False): + if not val["name"]: + data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11019])) + data["params"] = "NULL name" + return data + if cloud_type == "Aliyun IoT": + if val["msgType"] not in (0, 1, 2, 3): + data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11019])) + data["params"] = "sub msgType: " + str(val["msgType"]) + return data + if val["msgType"] == 0 and val["qos"] not in (0, 1, 2): + data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11019])) + data["params"] = "sub qos: " + str(val["qos"]) + return data + elif val["qos"] not in (0, 1, 2): + data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11019])) + data["params"] = "sub qos: " + str(val["qos"]) + return data + trigger = val["trigger"] + if trigger != "command_event": + logger.info("Invalid trigger %s, set as command_event" % trigger) + val["trigger"] = "command_event" + if not val["topic"]: + data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11019])) + data["params"] = "sub topic: " + val["topic"] + return data + if "payload_type" not in val: + val["payload_type"] = "Plaintext" + payload_type = val["payload_type"] + if payload_type not in ('Plaintext', 'Base64', 'JSON', 'Hex'): + data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11019])) + data["params"] = "sub payload_type: " + val["payload_type"] + return data + else: + try: + self.verify_script_is_ok(val) + except Exception as e: + try: + logger.warn("Invalid script %s" % e) + data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11012])) + data["params"] = val["script"] if "script" in val else "" + return data + finally: + e = 
def verify_gen_post(self, val, is_put=False):
    """Validate one user-script ("gen") definition posted by the UI.

    Returns an error dict (keys from ERROR_KEYS_WITH_PARAMS plus "params")
    on the first invalid field, or None when the entry is acceptable.

    NOTE(review): reconstructed from decompiled bytecode.  The decompiled
    text contained `data = val["topic"] or dict(...)` -- which would bind
    the *topic string* to `data` and then crash on `data["params"]` -- and
    an unconditional timeUnit check that would KeyError for non-timer
    triggers.  Both are restored to the obviously intended guard form.
    """
    if not val["name"]:
        data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11019]))
        data["params"] = "NULL name"
        return data
    if not val["funcName"]:
        data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11019]))
        data["params"] = "NULL funcName"
        return data
    if not val["script"]:
        data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11019]))
        data["params"] = "NULL script"
        return data
    # Trigger-specific field validation.
    if "trigger" in val and val["trigger"] == "timer_event":
        if not val["period"]:
            data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11019]))
            data["params"] = "NULL period: "
            return data
        if val["timeUnit"] not in (0, 1, 2):
            data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11019]))
            data["params"] = "name: " + val["name"] + " timeUnit: " + str(val["timeUnit"])
            return data
    elif "trigger" in val and val["trigger"] == "message_event":
        if not val["topic"]:
            data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11019]))
            data["params"] = "NULL topic: "
            return data
    elif "trigger" in val and val["trigger"] == "dsa_start_event":
        pass
    else:
        # NOTE(review): like the decompiled original, a missing "trigger"
        # key raises KeyError here; callers appear to always send one.
        data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11019]))
        data["params"] = "name: " + val["name"] + "trigger: " + val["trigger"]
        return data
    # Finally compile/exec the user script to make sure it is runnable.
    try:
        self.verify_script_is_ok(val)
    except Exception as e:
        logger.warn("Invalid script %s" % e)
        data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[-11012]))
        data["params"] = val["script"] if "script" in val else ""
        return data


def check_bacnet_import_address(self, val):
    """Return an error string when a BACnet address has an unknown object
    type prefix, or NONE (module-level "no error" sentinel) when valid."""
    object_type_list = [
        'AI', 'AO', 'AV', 'BI', 'BO', 'BV', 'MSI', 'MSO',
        'MSV', 'LOOP', 'TRENDLOG', 'PROPRIETARY_MIN']
    # Address format: "<objectType>,<instance...>"
    addr_list = val["Address"].split(",")
    if addr_list[0] not in object_type_list:
        return "Object type unsupported"
    return NONE
def check_modbus_import_address(self, val):
    """Validate a Modbus address of the form "<funcode>X<register>[.bit]".

    Returns an error string on the first problem, or NONE (module-level
    "no error" sentinel) when the address is acceptable.

    NOTE(review): reconstructed from decompiled bytecode.  The original
    decompilation contained `address[(ind + 1)[:None]]` (a broken rendering
    of `address[ind + 1:]` that subscripts an int and raises TypeError) and
    `return self.is_number(...) or "Address: ..."` (a broken rendering of a
    guard that made the rest of the function unreachable).  `regBit` is now
    initialised so addresses without a ".bit" suffix cannot raise NameError.
    """
    address = val["Address"]
    if "X" in address:
        ind = address.index("X")
        if ind >= 1:
            # Character before the "X" is the Modbus function code.
            funcode = address[ind - 1]
            if funcode not in ('0', '1', '3', '4'):
                return "Address: " + address
            mod_addr = address[ind + 1:]
            if not self.is_number(mod_addr):
                return "Address: " + address
        else:
            # "X" with no function code in front of it.
            return "Address: " + address
        regBit = -1  # -1 == no bit suffix present
        if "." in mod_addr:
            addr = mod_addr.split(".")
            mod_addr = addr[0]
            regBit = int(addr[1])
        # NOTE(review): upper bound 65536 (not 65535) kept from the original.
        if int(mod_addr) < 0 or int(mod_addr) > 65536:
            return "Address: " + address
        if funcode == "1":
            # Function code 1 (discrete inputs) is read-only.
            if RO != val["ReadWrite"]:
                return "Address is: " + address + " but ReadWrite is: " + val["ReadWrite"]
        elif funcode == "3":
            # A bit picked out of a holding register must stay read-only.
            if val["DataType"] == Bit and regBit >= 0:
                if regBit > 15:
                    return "Address: " + address
                if RO != val["ReadWrite"]:
                    return "Address is: " + address + " but ReadWrite is: " + val["ReadWrite"]
        elif funcode == "4" and val["DataType"] == Bit and regBit >= 0:
            if regBit > 15:
                return "Address: " + address
    return NONE


def check_import_group(self, GroupName):
    """Return True when no configured group carries GroupName (i.e. the
    imported row references an unknown group), False when it exists."""
    for grp in list(self.schema.simple_config["device_supervisor"]["groups"].values()):
        if grp["name"] == GroupName:
            return False
    return True


def response_error_params(self, line, val, topic, error_code):
    """Publish an import error for one file line: error_code selects the
    canned message, params carries the offending line/value."""
    data = dict(zip(self.ERROR_KEYS_WITH_PARAMS, self.ERROR_VALUES_DICT_WITH_PARAMS[error_code]))
    data["params"] = {'line': line, 'invalid': val}
    self.build_response_data(data, topic)
def verify_measure_post(self, ctr_name, val):
    """
    Verify each field of one imported measuring-point row.

    Returns an error string describing the first invalid field, or NONE
    (the module-level "no error" sentinel) when the row is acceptable.
    `val` is normalised in place (defaults for CodeType, msecSample,
    ArrayIndex, Decimal, Len, bitMap, reverseBit, EnableBit, DeadZonePercent,
    pollCycle, storageLwTSDB, TransDecimal).

    NOTE(review): this body is a reconstruction of heavily mangled
    decompiler output (mis-attached `elif` chains, `return f(x) or "msg"`
    inversions, split range checks).  Branch order and every error string
    are preserved from the decompiled text; nesting was restored from the
    error messages and the parallel protocol branches.  Constants such as
    Periodic, Bit, Snap7, RO, NONE are module-level names defined elsewhere
    in this file.
    """
    # --- controller / name / group sanity -------------------------------
    # Error text says "have '/' or '.'", so the reconstructed condition is
    # the union (the decompiled nesting only flagged '/' when '.' absent).
    if "." in val["ControllerName"] or "/" in val["ControllerName"]:
        return "ControllerName have '/' or '.': " + val["ControllerName"]
    ctr = self.find_ctl_name(ctr_name)
    if val["ControllerName"] != ctr["name"]:
        return "ControllerName: " + val["ControllerName"]
    if len(str(val["MeasuringPointName"])) > 256:
        return "MeasuringPointName len > 256: " + val["MeasuringPointName"]
    name = val["MeasuringPointName"]
    if name in self.names:
        return "MeasuringPointName: " + val["MeasuringPointName"]
    self.names.append(name)
    if val["UploadType"] in [Periodic, OnChange]:
        if "." in val["GroupName"] or "/" in val["GroupName"]:
            return "GroupName have '/' or '.': " + val["GroupName"]
        if self.check_import_group(val["GroupName"]):
            return "GroupName: " + val["GroupName"]
    elif val["UploadType"] == Never:
        if val["GroupName"] != "":
            return "GroupName: " + val["GroupName"]
    else:
        return "UploadType: " + val["UploadType"]
    if val["DataType"] == String:
        if "CodeType" not in val:
            val["CodeType"] = "UTF-8"
        elif val["CodeType"] not in ('UTF-8', 'UTF-16', 'UTF-16-BIG',
                                     'ASCII', 'GB2312', 'GB2312-LITTLE'):
            return "CodeType: " + val["CodeType"]
    # --- per-protocol address / datatype checks --------------------------
    protocol = ctr["protocol"]
    if protocol == Snap7:
        if val["DataType"] not in [Bit, Byte, Sint, Word, Int, Dword, Dint, Float, Double,
                                   String, Bcd]:
            return "DataType: " + val["DataType"]
        regType = re.search("\\D+", val["Address"]).group(0)
        if regType not in ('I', 'Q', 'M', 'DB', 'V'):
            return "Address: " + regType
        if regType == "DB":
            dbnumber = re.findall("\\d+", val["Address"])[0]
            # assumes valid DB numbers are 1..16000 -- the decompiled text
            # silently accepted < 1; restored to a symmetric range check.
            if int(dbnumber) < 1 or int(dbnumber) > 16000:
                return "Address: " + dbnumber
        if "msecSample" in val and val["msecSample"] == "":
            val["msecSample"] = "0"
        if val["msecSample"] not in ('0', '1'):
            return "msecSample: " + val["msecSample"]
    elif protocol in [Eip, EipPCCC]:
        if val["DataType"] not in [Bit, Byte, Sint, Word, Int, Dword, Dint, Float, String]:
            return "DataType: " + val["DataType"]
    elif protocol == OpcUa:
        if val["DataType"] not in [Bit, Byte, Sint, Word, Int, Dword, Dint, Float, Double,
                                   String, Ulong, Long]:
            return "DataType: " + val["DataType"]
        # Address format: "ns=<0..65535>;i=<id>" or "ns=...;s=<string id>"
        if ";" not in val["Address"]:
            return "Address: " + val["Address"]
        addr = val["Address"].split(";")
        namespace = re.findall("\\d+", addr[0])[0]
        if "ns=" not in addr[0] or int(namespace) < 0 or int(namespace) > 65535:
            return "Address: " + val["Address"]
        addrType = re.findall("[a-z]+", addr[1])[0]
        if addrType == "i":
            idCode = re.findall("\\d+", addr[1])[0]
            if not self.is_number(idCode):
                return "Address: " + val["Address"]
        elif addrType == "s":
            pass
        else:
            return "Address: " + val["Address"]
        if "msecSample" in val:
            if val["msecSample"] not in ('0', '1'):
                return "msecSample: " + val["msecSample"]
        if "ArrayIndex" not in val:
            val["ArrayIndex"] = -1
        elif int(val["ArrayIndex"]) < -1 or int(val["ArrayIndex"]) >= 1024:
            return "ArrayIndex: " + val["ArrayIndex"]
    elif protocol == OpcUa_PubSub:
        if val["DataType"] not in [Bit, Byte, Sint, Word, Int, Dword, Dint, Float, Double,
                                   String, Ulong, Long]:
            return "DataType: " + val["DataType"]
        if "msecSample" in val:
            if val["msecSample"] not in ('0', '1'):
                return "msecSample: " + val["msecSample"]
        if "ArrayIndex" not in val:
            val["ArrayIndex"] = -1
        elif int(val["ArrayIndex"]) < -1 or int(val["ArrayIndex"]) >= 1024:
            return "ArrayIndex: " + val["ArrayIndex"]
    elif protocol == OpcDa:
        if val["DataType"] not in [Bit, Byte, Sint, Word, Int, Dword, Dint, Float,
                                   Double, String, Ulong, Long]:
            return "DataType: " + val["DataType"]
        if "msecSample" in val:
            if val["msecSample"] == "":
                val["msecSample"] = "0"
        if "msecSample" in val and val["msecSample"] not in ('0', '1'):
            return "msecSample: " + val["msecSample"]
    elif protocol in [MbRtu, MbTcp, MbAscii, MbRtuOverTcp, MbRtuSlave]:
        if val["DataType"] not in [Bit, Word, Int, Dword, Dint, Float, String, Bcd,
                                   Bcd32, Ulong, Long, Double]:
            return "DataType: " + val["DataType"]
        error_val = self.check_modbus_import_address(val)
        if error_val != NONE:
            return error_val
        if "msecSample" in val and val["msecSample"] not in ('0', '1'):
            return "msecSample: " + val["msecSample"]
    elif protocol in [MC1C, MC3C, MC4C, MCPS, MC1E, MC3E, MC3COT]:
        if val["Address"] == "":
            return "Address: " + val["Address"]
        regType = re.search("\\D+", val["Address"]).group(0)
        if regType not in ('M', 'X', 'Y', 'SM', 'S', 'L', 'F',
                           'V', 'B', 'SB', 'DX', 'DY', 'TS',
                           'TC', 'SS', 'SC', 'CS', 'CC', 'D',
                           'SD', 'W', 'SW', 'R', 'Z', 'ZR', 'TN',
                           'SN', 'CN'):
            return "Address: " + regType
        if val["DataType"] not in [Bit, Word, Int, Dword, Dint, Float, String]:
            return "DataType: " + val["DataType"]
    elif protocol in [OMFT, OMFU, OMHLS]:
        if val["Address"] == "":
            return "Address: " + val["Address"]
        regType = re.search("\\D+", val["Address"]).group(0)
        if regType not in ('D', 'C', 'W', 'H', 'A', 'E'):
            return "Address: " + regType
        if val["DataType"] not in [Bit, Word, Int, Dword, Dint, Float, String, Double]:
            return "DataType: " + val["DataType"]
    elif protocol == SIPPI:
        if val["Address"] == "":
            return "Address: " + val["Address"]
        regType = re.search("\\D+", val["Address"]).group(0)
        if regType not in ('I', 'Q', 'M', 'DB', 'V'):
            return "Address: " + regType
        if val["DataType"] not in [Bit, Byte, Sint, Word, Int, Dword, Dint, Float,
                                   String, Bcd]:
            return "DataType: " + val["DataType"]
    elif protocol == VRCON:
        pass
    elif protocol in [DLT645_2007, DLT645_1997]:
        if val["DataType"] not in [Double, String, Word, Int, Dword, Dint, Float]:
            return "DataType: " + val["DataType"]
    elif protocol == PANMEW:
        if val["DataType"] not in [Bit, Word, Int, Dword, Dint, Float, String]:
            return "DataType: " + val["DataType"]
        if val["Address"] == "":
            return "Address: " + val["Address"]
        else:
            regType = re.search("\\D+", val["Address"]).group(0)
            if regType not in ('X', 'Y', 'R', 'DT', 'T',
                               'C', 'SV', 'EV', 'IX',
                               'IY', 'L', 'LD', 'FL'):
                return "Address: " + regType
    elif protocol in [Easycom, EasyEthernet]:
        if val["DataType"] not in [Bit, Byte, Sint, Word, Int, Dword, Dint, Float,
                                   String, Double, Bcd, Bcd32, Ulong, Long]:
            return "DataType: " + val["DataType"]
        if val["DataEndianReverse"] not in ('0', '1'):
            return "DataEndianReverse: " + val["DataEndianReverse"]
        if val["ReadWrite"] in ('ro', 'rw'):
            if not self.is_number(val["ReadOffset"]):
                return "ReadOffset: " + str(val["ReadOffset"])
            if not self.is_number(val["ReadLength"]):
                return "ReadLength: " + str(val["ReadLength"])
        if val["ReadWrite"] in ('wo', 'rw'):
            if not self.is_number(val["WriteOffset"]):
                return "WriteOffset: " + str(val["WriteOffset"])
            if not self.is_number(val["WriteLength"]):
                return "WriteLength: " + str(val["WriteLength"])
        if val["DataParseMethod"] not in ('hex2dec', 'bcd2dec', 'ieee754',
                                          'dlt-bcd', 'dlt-hex', 'dlt-ascii'):
            return "DataParseMethod: " + val["DataParseMethod"]
        if val["DataType"] == Bit:
            if int(val["BitId"]) < 0 or int(val["BitId"]) > 7:
                return "BitId: " + val["BitId"]
    elif protocol in [IEC101, IEC103, IEC104]:
        if val["DataType"] not in [Bit, Byte, Sint, Word, Int, Dword, Dint, Float,
                                   Ulong, Long]:
            return "DataType: " + val["DataType"]
        # IEC addresses are "<type> <ioa>" pairs separated by a space.
        if " " not in val["Address"]:
            return "Address: " + val["Address"]
    elif protocol in [TANCYV13, TATEKPROGRAM, TATEKPROGRAM_OverTcp]:
        if val["DataType"] not in [Bit, Word, Int, Dword, Dint, Float, String]:
            return "DataType: " + val["DataType"]
        if val["Address"] == "":
            return "Address: " + val["Address"]
        regType = re.search("\\D+", val["Address"]).group(0)
        if regType not in ('X', 'Y', 'M', 'S', 'T', 'C', 'R', 'D', 'RT', 'RC'):
            return "Address: " + regType
    elif protocol in [BacBip, BacMstp]:
        if val["DataType"] not in [Bit, Byte, Sint, Word, Int, Dword, Dint, Double,
                                   Float, Ulong, Long, String]:
            return "DataType: " + val["DataType"]
        error_val = self.check_bacnet_import_address(val)
        if error_val != NONE:
            return error_val
    elif protocol == Euromap63:
        if val["DataType"] not in [Bit, Byte, Sint, Word, Int, Dword, Dint, Float,
                                   Double, String]:
            return "DataType: " + val["DataType"]
        if val["Address"] == "":
            return "Address: " + val["Address"]
    elif protocol in [DNP3Tcp, DNP3Udp, DNP3RTU]:
        if val["DataType"] not in [Bit, Byte, Sint, Word, Int, Dword, Dint, Float,
                                   Double, String]:
            return "DataType: " + val["DataType"]
    elif protocol in [SIFW, SIPPI_OverTcp, SI_WebApi, MCR, MC3E_UDP, MCFS_OverTcp,
                      MCFL, MCFL_OverTcp, OMHL_OverTcp, OMHLC, OMHLC_OverTcp,
                      OMCN, OMCCN, ABCC, ABMC, ABSLC, KEMC, KENano,
                      KENano_OverTcp, PANMC, PANMEW_OverTcp, BFADS, DTSerial,
                      DTAscii, DTTcp, DTSerial_OverTcp, XJSerial,
                      XJSerial_OverTcp, XJTcpNet, XJIN, VGSerial,
                      VGSerial_OverTcp, FJSPB, FJSPB_OverTcp, FJSPHNet, GESRPT,
                      YKGWLT, DLT645_OverTcp, DLT698, IVSerial, IV_OverTcp,
                      IV_TcpNet, CNCFS, RobotEfort, RobotAbb, RobotFanuc,
                      Iec61850_MMS, KeBaSocket, CJT188_2004, Hj212_Serial,
                      Hj2122005_Serial, OpcUa_PubSub]:
        if val["DataType"] not in [Bit, Byte, Sint, Word, Int, Dword, Dint, Float,
                                   Double, String, Bcd, Bcd32, Ulong, Long]:
            return "DataType: " + val["DataType"]
        if val["Address"] == "":
            return "Address: " + val["Address"]
    else:
        logger.warn("Invalid protocol %s" % protocol)
        return "protocol: " + protocol
    # --- datatype-specific common checks ---------------------------------
    if val["DataType"] in [Float, Double]:
        if val["Decimal"] == "":
            val["Decimal"] = 2
        if int(val["Decimal"]) < 1 or int(val["Decimal"]) > 6:
            return "Decimal: " + val["Decimal"]
    elif val["DataType"] == String:
        if val["Len"] == "":
            val["Len"] = 2
        if int(val["Len"]) < 0 or int(val["Len"]) > 1024:
            return "Len: " + val["Len"]
    elif val["DataType"] == Bit:
        if "bitMap" not in val:
            val["bitMap"] = "0"
        if val["bitMap"] not in ('0', '1'):
            return "bitMap: " + val["bitMap"]
        if "reverseBit" not in val:
            val["reverseBit"] = "0"
        if val["reverseBit"] not in ('0', '1'):
            return "reverseBit: " + val["reverseBit"]
    elif val["DataType"] in [Byte, Sint, Word, Int, Dword, Dint, Bcd, Bcd32,
                             Ulong, Long]:
        if "EnableBit" not in val:
            val["EnableBit"] = "0"
        if int(val["EnableBit"]) not in (0, 1):
            return "EnableBit: " + str(val["EnableBit"])
        if int(val["EnableBit"]) == 1:
            if not self.is_number_scientific_counting(val["BitIndex"]):
                return "BitIndex: " + str(val["BitIndex"])
            # assumes bitMap/reverseBit are present whenever EnableBit == 1
            # (same KeyError behaviour as the decompiled original).
            if int(val["bitMap"]) not in (0, 1):
                return "bitMap: " + str(val["bitMap"])
            if int(val["reverseBit"]) not in (0, 1):
                return "reverseBit: " + str(val["reverseBit"])
            # BitIndex range depends on the register width.
            if val["DataType"] in [Byte, Sint]:
                if int(val["BitIndex"]) > 7 or int(val["BitIndex"]) < 0:
                    return "BitIndex: " + str(val["BitIndex"])
            elif val["DataType"] in [Word, Int, Bcd]:
                if int(val["BitIndex"]) > 15 or int(val["BitIndex"]) < 0:
                    return "BitIndex: " + str(val["BitIndex"])
            elif val["DataType"] in [Dword, Dint, Bcd32]:
                if int(val["BitIndex"]) > 31 or int(val["BitIndex"]) < 0:
                    return "BitIndex: " + str(val["BitIndex"])
            elif val["DataType"] in [Ulong, Long]:
                if int(val["BitIndex"]) > 63 or int(val["BitIndex"]) < 0:
                    return "BitIndex: " + str(val["BitIndex"])
    # --- upload / misc common checks -------------------------------------
    if val["UploadType"] == OnChange:
        if "EnableBit" in val and val["EnableBit"] != "":
            if val["EnableBit"] == "0":
                if "DeadZonePercent" not in val:
                    val["DeadZonePercent"] = "0"
                if not self.is_number_scientific_counting(val["DeadZonePercent"]):
                    return "DeadZonePercent: " + val["DeadZonePercent"]
        elif val["DataType"] in [Float, Double]:
            if "DeadZonePercent" not in val:
                val["DeadZonePercent"] = "0"
            if not self.is_number_scientific_counting(val["DeadZonePercent"]):
                return "DeadZonePercent: " + val["DeadZonePercent"]
    # (decompiled `not "pollCycle" not in val` == `"pollCycle" in val`)
    if "enablepollCycle" in ctr and ctr["enablepollCycle"] == 1 and "pollCycle" in val:
        if val["pollCycle"] == "":
            val["pollCycle"] = "0"
        if val["pollCycle"] not in ('0', '1'):
            return "pollCycle: " + val["pollCycle"]
    if val["ReadWrite"] not in [RO, WO, RW]:
        return "ReadWrite: " + val["ReadWrite"]
    if len(val["Unit"]) > 16:
        return "Unit: " + val["Unit"]
    if len(val["Description"]) > 128:
        return "Description: " + val["Description"]
    if val["UploadType"] != "never":
        if "storageLwTSDB" not in val:
            val["storageLwTSDB"] = 0
        if int(val["storageLwTSDB"]) not in (0, 1):
            return "storageLwTSDB: " + val["storageLwTSDB"]
    # --- value-transform checks ------------------------------------------
    if val["Transform Type"] == "none":
        pass
    elif val["Transform Type"] == "gain":
        if not self.is_number_scientific_counting(val["MaxValue"]):
            return "MaxValue: " + str(val["MaxValue"])
        if not self.is_number_scientific_counting(val["MinValue"]):
            return "MinValue: " + str(val["MinValue"])
        if float(val["MaxValue"]) < float(val["MinValue"]):
            return "MaxValue: " + str(val["MaxValue"]) + "MinValue: " + str(val["MinValue"])
        if not self.is_number_scientific_counting(val["MaxScale"]):
            return "MaxScale: " + str(val["MaxScale"])
        if not self.is_number_scientific_counting(val["MinScale"]):
            return "MinScale: " + str(val["MinScale"])
        if float(val["MaxScale"]) < float(val["MinScale"]):
            return "MaxScale: " + str(val["MaxScale"]) + "MinScale: " + str(val["MinScale"])
        if "TransDecimal" not in val:
            val["TransDecimal"] = 6
        if not self.is_number_scientific_counting(val["TransDecimal"]):
            return "TransDecimal: " + str(val["TransDecimal"])
    elif val["Transform Type"] == "zoom":
        if not self.is_number_scientific_counting(val["Gain"]):
            return "Gain: " + str(val["Gain"])
        if not self.is_number_scientific_counting(val["Offset"]):
            return "Offset: " + str(val["Offset"])
        if "TransDecimal" not in val:
            val["TransDecimal"] = 6
        if not self.is_number_scientific_counting(val["TransDecimal"]):
            return "TransDecimal: " + str(val["TransDecimal"])
    elif val["Transform Type"] == "ptct":
        if not self.is_number_scientific_counting(val["Gain"]):
            return "Gain: " + str(val["Gain"])
        if not self.is_number_scientific_counting(val["Offset"]):
            return "Offset: " + str(val["Offset"])
        if not self.is_number_scientific_counting(val["Pt"]):
            return "Pt: " + str(val["Pt"])
        if not self.is_number_scientific_counting(val["Ct"]):
            return "Ct: " + str(val["Ct"])
        if "TransDecimal" not in val:
            val["TransDecimal"] = 6
        if not self.is_number_scientific_counting(val["TransDecimal"]):
            return "TransDecimal: " + str(val["TransDecimal"])
    elif val["Transform Type"] == "bit":
        if val["startBit"] and val["endBit"]:
            if not self.is_number_scientific_counting(val["startBit"]):
                return "startBit: " + str(val["startBit"])
            if not self.is_number_scientific_counting(val["endBit"]):
                return "endBit: " + str(val["endBit"])
            # Highest selectable bit depends on the register width.
            if val["DataType"] in [Ulong, Long]:
                bit_limit = 63
            elif val["DataType"] in [Dword, Dint]:
                bit_limit = 31
            elif val["DataType"] in [Word, Int]:
                bit_limit = 15
            elif val["DataType"] in [Byte, Sint]:
                bit_limit = 7
            else:
                bit_limit = None
            if bit_limit is not None:
                if int(val["startBit"]) < 0 or int(val["startBit"]) > bit_limit:
                    return "startBit: " + str(val["startBit"])
                if int(val["endBit"]) < 0 or int(val["endBit"]) > bit_limit:
                    return "endBit: " + str(val["endBit"])
                if int(val["endBit"]) <= int(val["startBit"]):
                    return "endBit: " + str(val["endBit"])
    elif val["Transform Type"] == "numMapping":
        # SECURITY(review): eval() on imported file content -- arbitrary
        # code execution if the import file is attacker-controlled.
        # Kept (flagged, not replaced) to preserve behaviour; consider
        # ast.literal_eval.
        Mapping_table = eval(val["Mapping_table"])
        if not isinstance(Mapping_table, list):
            return "Mapping_table: This object is not a list"
        for i in Mapping_table:
            # Every mapping entry must carry both a source and a target.
            if "source" not in i or "target" not in i:
                return "Mapping_table" + str(val["Mapping_table"])
    else:
        return "Transform Type: " + val["Transform Type"]
    return NONE
def on_erlang_bind_info(self, topic, payload):
    """Answer an Erlang bind-info request.

    Builds a deep copy of the bind configuration, annotates every variable
    group with the number of variables referencing it, strips the bulky
    "variables"/"alerts" sections, and publishes the result on `topic`.
    On any failure a -1003 invalid-response is published instead.

    :param topic: request topic to answer on
    :param payload: raw request payload (logged only)
    :return: whatever build_response_data returns, or None on error
    """
    try:
        logger.debug("master receive topic: %s , payload: %s" % (topic, payload))
        snapshot = copy.deepcopy(self.ucfg.bindConfig)
        # Count how many bound variables reference each group id.
        ref_counts = dict()
        for variable in snapshot["variables"]:
            group_id = variable["groupId"]
            ref_counts[group_id] = ref_counts.get(group_id, 0) + 1
        for group in snapshot["varGroups"]:
            if group["id"] in ref_counts:
                group["reference"] = ref_counts[group["id"]]
        # The caller only needs the group summary, not the raw lists.
        snapshot.pop("variables", None)
        snapshot.pop("alerts", None)
        return self.build_response_data(self.wrap_response_data(snapshot), topic)
    except Exception as e:
        logger.warn("bind info response error %s" % e)
        self.pub_invalid_response_by_params(topic, -1003)
def on_erlang_bind_group_info(self, topic, payload):
    """Answer a paged bind-group-info request.

    Filters the bound variables down to one group (optionally further by a
    case-insensitive measureName substring), then returns a page described
    by "cursor"/"limit" together with the total match count.

    :param topic: request topic to answer on
    :param payload: dict-like with cursor, limit, groupId, [measureName]
    """
    try:
        logger.debug("master receive topic: %s , payload: %s" % (topic, payload))
        payload = self.payload_transfer(payload)
        cursor = int(payload["cursor"])
        length = int(payload["limit"])
        data = dict()
        mea_list = list()
        variables = copy.deepcopy(self.ucfg.bindConfig["variables"])
        group_id = payload["groupId"]
        mea_name = payload["measureName"] if "measureName" in payload else ""
        for mea in variables:
            gid = mea["groupId"]
            if gid == group_id:
                if mea_name:
                    name = mea["measureName"]
                    if mea_name.lower() in name.lower():
                        mea_list.append(mea)
                else:
                    mea_list.append(mea)
        data["limit"] = length
        key_length = len(mea_list)
        # Clamp the slice end: limit 0 means "everything", and the page may
        # not run past the last match.
        if length == 0 or cursor + length > key_length:
            length = key_length
        else:
            length = cursor + length
        data["total"] = key_length
        data["cursor"] = cursor
        # BUG FIX: decompiled source read "mea_list[cursor[:length]]",
        # which subscripts an int and raises TypeError; the intended slice
        # is mea_list[cursor:length].
        data["result"] = mea_list[cursor:length]
        return self.build_response_data(data, topic)
    except Exception as e:
        logger.warn("bind info response error %s" % e)
        self.pub_invalid_response_by_params(topic, -1003)


def clear_offline_data(self, topic, payload):
    """Stop the named cloud service, wipe its offline cache files and
    restart it, then acknowledge on `topic`."""
    logger.debug("master receive topic: %s , payload: %s" % (topic, payload))
    try:
        payload = self.payload_transfer(payload)
        cloud_name = payload["name"]
        self.master.stop_service(gl_cloud_serviceId_dict[cloud_name])
        # NOTE(review): cloud_name is interpolated into a shell command;
        # assumed to come from trusted (local UI) input.
        cmd = "rm -fr %s/%s*" % (self.schema.simple_config["device_supervisor"]["misc"]["cachePath"], cloud_name)
        os.system(cmd)
        self.master.start_service(gl_cloud_serviceId_dict[cloud_name])
        data = self.wrap_response_data("ok")
        return self.build_response_data(data, topic)
    except Exception as e:
        logger.warn("Response error: %s" % e)
        self.pub_invalid_response_by_params(topic, -1003)


def get_ab_login_config(self, topic, payload):
    """Publish the cached cloud login settings; on any read/parse failure
    an empty url/username/password triple is returned instead."""
    try:
        with open("/var/user/data/dbhome/device_supervisor/login.json", "r", encoding="utf-8") as fp:
            device_config = {"result": (json.load(fp))}
    except Exception:
        device_config = {"result": {'url': "", 'username': "", 'password': ""}}
    self.build_response_data(device_config, topic)
def _get_cloud_device_config(self, device_config, url, token, page=0):
    """Walk the cloud's device list one page at a time and collect, per
    device, its telemetry keys with an inferred dataType; results accumulate
    into the `device_config` dict passed by the caller."""
    headers = {"X-Authorization": ("Bearer " + token)}
    get_device_url = "%s/api/tenant/devices" % url
    get_device_params = "page=%s&pageSize=1" % page
    page_response = requests.get(get_device_url, params=get_device_params, headers=headers, timeout=60)
    page_response = json.loads(page_response.text)
    for device in page_response["data"]:
        telemetry_url = "%s/api/plugins/telemetry/DEVICE/%s/values/timeseries" % (url, device["id"]["id"])
        telemetry = json.loads(requests.get(telemetry_url, headers=headers, timeout=60).text)
        for _, var in telemetry.items():
            sample = var[0]["value"]
            # Infer the point's data type from the latest sample text.
            if sample.isdigit():
                var[0]["dataType"] = "WORD"
            elif "." in sample and sample.replace(".", "", 1).isdigit():
                var[0]["dataType"] = "FLOAT"
            elif sample.lower() == "true" or sample.lower() == "false":
                var[0]["dataType"] = "BOOL"
            else:
                var[0]["dataType"] = "STRING"
            # Only the key and inferred type are kept.
            del var[0]["ts"]
            del var[0]["value"]
        device_config[device["name"]] = telemetry
    # Recurse into the next page until the cloud reports no more.
    if page + 1 < int(page_response["totalPages"]):
        self._get_cloud_device_config(device_config, url, token, page + 1)


def _get_cloud_device_token(self, payload):
    """Ensure payload["token"] holds a usable bearer token: reuse the cached
    one from login.json when the cloud still accepts it, otherwise log in
    with payload's username/password and store the fresh token in payload."""
    try:
        with open("/var/user/data/dbhome/device_supervisor/login.json", "r", encoding="utf-8") as fp:
            payload["token"] = json.load(fp)["token"]
    except Exception:
        payload["token"] = str()
    probe_url = payload["url"] + "/api/tenant/devices"
    probe_params = "page=0&pageSize=1"
    probe_headers = {"X-Authorization": ("Bearer " + payload["token"])}
    probe = requests.get(probe_url, params=probe_params, headers=probe_headers, timeout=60)
    if probe.status_code == 401:
        # Cached token rejected -- authenticate from scratch.
        auth_login_url = "%s/api/auth/login" % payload["url"]
        credentials = {'username': payload["username"], 'password': payload["password"]}
        login_response = requests.post(auth_login_url, data=(json.dumps(credentials)), timeout=60)
        payload["token"] = json.loads(login_response.text)["token"]
        logger.info("login cloud get token:%s" % payload["token"])
def get_ab_cloud_config(self, topic, payload):
    """Log in to the cloud, persist the credentials (including the token)
    to login.json, pull the full per-device telemetry schema into
    devices.json, and report ok/failure on `topic`."""
    try:
        payload = self.payload_transfer(payload)
        self._get_cloud_device_token(payload)
        # Cache credentials + token so later calls can skip the login.
        with open("/var/user/data/dbhome/device_supervisor/login.json", "w", encoding="utf-8") as fp:
            import rapidjson
            rapidjson.dump(payload, fp, indent=1, ensure_ascii=False)
        device_config = dict()
        self._get_cloud_device_config(device_config, payload["url"], payload["token"], 0)
        with open("/var/user/data/dbhome/device_supervisor/devices.json", "w", encoding="utf-8") as fp:
            import rapidjson
            rapidjson.dump(device_config, fp, indent=1, ensure_ascii=False)
        response = {"result": "ok"}
    except Exception as error:
        try:
            logger.warn("Get ab cloud config failed(%s)" % error)
            # NOTE(review): "falied" is a typo, but it is a runtime status
            # string clients may already match on -- left unchanged here.
            response = {'result':"falied", 'message':str(error)}
        finally:
            # Decompiler rendering of Python 3's implicit `del error`.
            error = None
            del error
    self.build_response_data(response, topic)

def get_ab_device_config(self, topic, payload):
    """Publish the cached per-device telemetry schema from devices.json;
    an empty result is returned when the cache is missing or unreadable."""
    try:
        with open("/var/user/data/dbhome/device_supervisor/devices.json", "r", encoding="utf-8") as fp:
            device_config = {"result": (json.load(fp))}
    except Exception:
        device_config = {"result": {}}

    self.build_response_data(device_config, topic)

def update_ab_device_config(self, topic, payload):
    """Overwrite devices.json with the payload supplied by the caller and
    acknowledge with "ok"/"failed" on `topic`."""
    try:
        payload = self.payload_transfer(payload)
        with open("/var/user/data/dbhome/device_supervisor/devices.json", "w", encoding="utf-8") as fp:
            import rapidjson
            rapidjson.dump(payload, fp, indent=1, ensure_ascii=False)
        data = self.wrap_response_data("ok")
    except Exception as error:
        try:
            logger.warn("update ab decive config failed(%s)" % error)
            data = self.wrap_response_data("failed")
        finally:
            error = None
            del error

    self.build_response_data(data, topic)
def aliyun_update_config(self, topic, payload):
    """Patch connType/deviceSecret of the matching Aliyun cloud entry in the
    live simple_config and schedule a config re-apply."""
    try:
        logger.debug("master receive topic: %s , payload: %s" % (topic, payload))
        # Keep the previous config so on_cfg_editing can diff against it.
        self.schema.simple_config_old = copy.deepcopy(self.schema.simple_config)
        payload = self.payload_transfer(payload)
        clouds = self.schema.simple_config["device_supervisor"]["clouds"]
        for _, cloud in clouds.items():
            if cloud["name"] == payload["name"] and cloud["type"] == payload["type"]:
                cloud["args"]["connType"] = int(payload["connType"])
                cloud["args"]["deviceSecret"] = payload["deviceSecret"]
        self.on_cfg_editing(wait_time=1)
    except Exception as e:
        logger.warn("Response error: %s" % e)


def group_update_config(self, topic, payload):
    """Patch uploadInterval of the named variable group in the live
    simple_config and schedule a config re-apply."""
    try:
        logger.debug("master receive topic: %s , payload: %s" % (topic, payload))
        self.schema.simple_config_old = copy.deepcopy(self.schema.simple_config)
        payload = self.payload_transfer(payload)
        groups = self.schema.simple_config["device_supervisor"]["groups"]
        for _, group in groups.items():
            if group["name"] == payload["group_name"]:
                group["uploadInterval"] = int(payload["upload_interval"])
        self.on_cfg_editing(wait_time=1)
    except Exception as e:
        logger.warn("Response error: %s" % e)


def get_config(self, topic, payload):
    """Publish the requested configuration on "<topic>/response".

    payload["runningConfig"] selects between the merged running config
    (default/truthy) and the raw stored user config (falsy).  On any error
    an empty config string is returned for the same task_id.

    BUG FIX(review): the decompiled source had `elif runningConfig:` chained
    to the payload check, so whenever the caller actually supplied
    "runningConfig" no response was built and the except path answered with
    an empty config.  The two checks are now independent, as clearly
    intended.
    """
    try:
        runningConfig = True
        payload = self.payload_transfer(payload)
        if "runningConfig" in payload:
            runningConfig = payload["runningConfig"]
        if runningConfig:
            ucfg = copy.deepcopy(self.cfg.ucfg)
            simple_config_save_user_config(ucfg, self.schema.simple_config["device_supervisor"])
            response = {'task_id': payload["task_id"], 'config': ucfg.__dict__}
        else:
            response = {'task_id': payload["task_id"],
                        'config': (self.master.cfg.ucfg).__dict__}
    except Exception as error:
        logger.warn("Response error: %s" % error)
        response = {'task_id': payload["task_id"], 'config': ""}
    self.build_response_data(response, topic + "/response")
def update_config(self, topic, payload):
    """Replace the whole device-supervisor configuration with
    payload["config"], refresh all certificates, re-apply, and report
    success/failed on "<topic>/response"."""
    try:
        payload = self.payload_transfer(payload)
        update_config = payload["config"]
        # The stored version string is cleared so the loader does not
        # reject the config as stale/foreign.
        if "version" in update_config:
            update_config["version"] = ""
        self.schema.simple_config_old = copy.deepcopy(self.schema.simple_config)
        self.master.cfg.load_config_file(update_config)
        self.schema.load_persistant_config()
        # Certificates are embedded in the config and must be re-written
        # to disk for the OPC-UA server, every cloud, and every OPC-UA
        # controller using certificate auth.
        self.update_opcuaserver_cert_by_content(self.ucfg.opcuaServer)
        for cloud in self.ucfg.clouds:
            self.update_cert_by_content(cloud)

        for controller in self.ucfg.controllers:
            if controller["protocol"] == "OPC-UA" and controller["args"]["auth"] == "certificate":
                self.update_opcuaDriver_cert_by_content(controller)

        self.on_cfg_editing(wait_time=1)
        response = {'task_id':payload["task_id"], 'result':"success"}
    except Exception as error:
        try:
            logger.warn("Response error: %s" % error)
            response = {'task_id':payload["task_id"], 'result':"failed"}
        finally:
            # Decompiler rendering of Python 3's implicit `del error`.
            error = None
            del error

    self.build_response_data(response, topic + "/response")

def import_global_config(self, filepath, topic):
    """Load a full configuration from an uploaded JSON file, apply it the
    same way update_config does, delete the upload, and report ok/failed
    on `topic`."""
    try:
        logger.debug("master receive import global config request topic: %s, filepath: %s" % (topic, filepath))
        self.schema.simple_config_old = copy.deepcopy(self.schema.simple_config)
        with open(filepath, "r", encoding="utf-8") as f:
            global_config = json.load(f)
        if "version" in global_config:
            global_config["version"] = ""
        self.master.cfg.load_config_file(global_config)
        self.schema.load_persistant_config()
        self.update_opcuaserver_cert_by_content(self.ucfg.opcuaServer)
        for cloud in self.ucfg.clouds:
            self.update_cert_by_content(cloud)

        for controller in self.ucfg.controllers:
            if controller["protocol"] == "OPC-UA" and controller["args"]["auth"] == "certificate":
                self.update_opcuaDriver_cert_by_content(controller)

        self.on_cfg_editing(wait_time=1)
        # The uploaded temp file is only needed once.
        os.remove(filepath)
        response = self.wrap_response_data("ok")
    except Exception as e:
        try:
            logger.warn("Response error: %s" % e)
            response = self.wrap_response_data("failed")
        finally:
            e = None
            del e

    self.build_response_data(response, topic)
def show_log_callback(self, topic, payload):
    """Publish the last `lines` filtered entries of the app log on `topic`.

    payload: {"lines": <count>, ["level": ALL|ERROR|WARN|INFO|DEBUG],
    ["key": <substring>]}.  The first line of the log file is skipped
    (header), lines are matched against "<date> <time> <LEVEL> <message>",
    filtered by minimum level and message substring, and the tail of the
    survivors is returned.

    BUG FIX(review): the decompiled source read `fp.readlines()[1[:None]]`
    and `show_log_lines[(-lines)[:None]]` -- broken renderings of the
    slices `[1:]` and `[-lines:]` that subscript an int and raise
    TypeError.  Restored.
    """
    level = "ALL"
    key = ""
    lines = 100
    raw_log_lines = []
    show_log_lines = []
    log_file = "%s/log/device_supervisor.log" % self.cfg.app_base_path
    try:
        payload = self.payload_transfer(payload)
        lines = int(payload["lines"])
        if "level" in payload:
            level = payload["level"]
        if "key" in payload:
            key = payload["key"]
        with open(log_file, "r") as fp:
            # Skip the first (header) line of the log file.
            raw_log_lines = fp.readlines()[1:]
    except Exception as error:
        logger.warn("read log failed(%s)" % error)

    log_format = "(\\S+) (\\S+) (\\S+) (.+)"
    for line in raw_log_lines:
        match = re.match(log_format, line)
        if match is None:
            continue
        log_level = match.group(3)
        if log_level not in ('ERROR', 'WARNING', 'INFO', 'DEBUG'):
            continue
        # Minimum-severity filter; "ALL"/"DEBUG" let everything through.
        if level == "ERROR":
            if log_level != "ERROR":
                continue
        elif level == "WARN":
            if log_level not in ('ERROR', 'WARNING'):
                continue
        elif level == "INFO":
            if log_level not in ('ERROR', 'WARNING', 'INFO'):
                continue
        log_message = match.group(4)
        if len(key) > 0:
            if key not in log_message:
                continue
        show_log_lines.append(line)

    self.build_response_data({"result": (show_log_lines[-lines:])}, topic)


def log_action_callback(self, topic, payload):
    """Handle log maintenance actions; only {"type": "delete"} is
    supported, which truncates the live log and removes rotated files."""
    payload = self.payload_transfer(payload)
    if "type" in payload and payload["type"] == "delete":
        log_file = "%s/log/device_supervisor.log" % self.cfg.app_base_path
        open(log_file, "w").close()
        os.system("rm -f %s.*; sync" % log_file)
        response = self.wrap_response_data("ok")
    else:
        response = self.wrap_response_data("failed")
    self.build_response_data(response, topic)


def build_log(self):
    """Archive all log files into a tarball named after the exporting
    thread's ident (runs inside export_log_thread)."""
    cmd = "cd %s/log; tar -zcvf device_supervisor-%s.tar.gz device_supervisor.log*" % (
        self.cfg.app_base_path, self.export_log_thread.ident)
    os.system(cmd)
'id':(self.export_log_thread).ident} + self.build_response_data(response, topic) + + def export_log_status_callback(self, topic, payload): + payload = self.payload_transfer(payload) + if "id" not in payload: + logger.error("request is invaild!") + return + elif self.export_log_thread.is_alive(): + response = {"result": (self.export_log_status[int(payload["id"])])} + else: + if int(payload["id"]) in self.export_log_status: + filepath = "%s/log/device_supervisor-%s.tar.gz" % (self.cfg.app_base_path, payload["id"]) + if os.path.exists(filepath): + response = {"result": {"status": "success"}} + else: + response = {"result": {"status": "failed"}} + del self.export_log_status[int(payload["id"])] + else: + response = {"result": {"status": "failed"}} + self.build_response_data(response, topic) + + def config_action_callback(self, topic, payload): + try: + logger.debug("master receive topic: %s , payload: %s" % (topic, payload)) + self.schema.simple_config_old = copy.deepcopy(self.schema.simple_config) + payload = self.payload_transfer(payload) + if payload["type"] == "reset": + filepath = "%s/cfg/device_supervisor/device_supervisor.cfg" % self.cfg.app_base_path + if os.path.exists(filepath): + os.remove(filepath) + self.master.cfg.load_config_file() + self.schema.load_persistant_config() + self.on_cfg_editing(wait_time=1) + response = self.wrap_response_data("ok") + else: + response = self.wrap_response_data("failed") + except Exception as e: + try: + logger.warn("Response error: %s" % e) + response = self.wrap_response_data("failed") + finally: + e = None + del e + + self.build_response_data(response, topic) \ No newline at end of file diff --git a/APPS_UNCOMPILED/src/master/__init__.py b/APPS_UNCOMPILED/src/master/__init__.py new file mode 100644 index 0000000..21e188f --- /dev/null +++ b/APPS_UNCOMPILED/src/master/__init__.py @@ -0,0 +1,7 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 
# mindsphere.py -- entry point for the MindSphere put service.
import sys, os, argparse
from mindsphere.mindsphere_function import MindSphereFunction

VAR_RUN_PYTHON_DS2_PATH = "/var/run/python/ds2"


def write_pid_to_file(service_id):
    """Record the current PID under /var/run so a supervisor can find us."""
    if not os.path.exists(VAR_RUN_PYTHON_DS2_PATH):
        # BUG FIX: os.mkdir cannot create intermediate directories and fails
        # when /var/run/python is missing; makedirs(exist_ok=True) creates the
        # full path and is race-free against a concurrent creator.
        os.makedirs(VAR_RUN_PYTHON_DS2_PATH, exist_ok=True)
    pid_file_path = VAR_RUN_PYTHON_DS2_PATH + "/" + "mindsphere.py-" + str(service_id)
    if os.path.exists(pid_file_path):
        os.remove(pid_file_path)
    with open(pid_file_path, "w") as f:
        f.write(str(os.getpid()))


def delete_pid_to_file(service_id):
    """Remove the PID file written by write_pid_to_file()."""
    pid_file_path = VAR_RUN_PYTHON_DS2_PATH + "/" + "mindsphere.py-" + service_id
    if os.path.exists(pid_file_path):
        os.remove(pid_file_path)


def main(argv=sys.argv):
    """Parse -s/-c, run the MindSphere forwarding loop, then clean up."""
    ap = argparse.ArgumentParser(description="Drivers Server")
    ap.add_argument("-s", "--service_id", action="store", required=True, help="Service id.")
    ap.add_argument("-c", "--config", action="store", required=True, help="Config file path.")
    # BUG FIX: decompiled source had `argv[1[:None]]`, which raises TypeError;
    # the intent is argv[1:] (drop the program name).
    args = ap.parse_args(argv[1:])
    write_pid_to_file(int(args.service_id))
    mindsphere = MindSphereFunction(args.service_id, args.config)
    cfg = mindsphere._config_loader()
    mindsphere.sub_topic(cfg)
    mindsphere._run_mindsphereput(cfg)
    delete_pid_to_file(str(args.service_id))


if __name__ == "__main__":
    main()
# mindspherePut.py -- helpers to push realtime measurements to MindSphere.
from mindsphere_core import RestClientConfig
from mindsphere_core import AppCredentials
from mindsphere_core.exceptions import MindsphereError
from timeseries.clients.time_series_client import TimeSeriesClient
from timeseries.models.put_timeseries_request import PutTimeseriesRequest
from timeseries.models.timeseries import Timeseries
from common.Logger import logger
import time


def timestamp2Utc(timestamp):
    """Convert an epoch *timestamp* to a 'YYYY-mm-ddTHH:MM:SSZ' string.

    NOTE(review): subtracts a fixed 8-hour offset (28800 s) before using
    localtime(), i.e. it assumes the device clock runs at UTC+8 — confirm.
    """
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.localtime(timestamp - 28800))


def find_measure_uploadRule_config(uploadRules, ctrlName, measureName):
    """Return the first rule matching (ctrlName, measureName), else None."""
    for uploadRule in uploadRules:
        if uploadRule["ctrlName"] == ctrlName and uploadRule["measureName"] == measureName:
            return uploadRule
    # Explicit None return (the original fell off the end implicitly).
    return None


def upload_message(message, mindsphereput, enableOfflinePut):
    """Map the measurements in *message* to a MindSphere field-payload dict."""
    putPayload = {}
    for measure in message["measures"]:
        # Unhealthy samples are skipped unless offline puts are allowed.
        if not enableOfflinePut and not measure["health"]:
            continue
        uploadRule = find_measure_uploadRule_config(mindsphereput["uploadRules"], measure["ctrlName"], measure["name"])
        if uploadRule:
            # NOTE(review): nesting reconstructed from decompiled output — a
            # matching rule with hide != 0 suppresses the measurement; confirm.
            if uploadRule["hide"] == 0:
                putPayload[uploadRule["uploadName"]] = measure["value"]
        else:
            putPayload[measure["name"]] = measure["value"]
    return putPayload


def init_clientconfig(mindsphere):
    """Build a RestClientConfig from a mindsphere connection config dict."""
    return RestClientConfig(
        connection_timeout_in_seconds=mindsphere["args"]["timeout"],
        host_environment=mindsphere["args"]["hostEnvironment"],
        host_domain=mindsphere["args"]["hostDomain"])


def init_credentials(mindsphere):
    """Build AppCredentials from a mindsphere connection config dict."""
    return AppCredentials(
        app_name=mindsphere["args"]["appName"],
        app_version=mindsphere["args"]["appVersion"],
        host_tenant=mindsphere["args"]["hostTenant"],
        user_tenant=mindsphere["args"]["userTenant"],
        key_store_client_id=mindsphere["args"]["clientId"],
        key_store_client_secret=mindsphere["args"]["clientSecret"])


def MindSpherePut(time_series_data, mindsphereput, clientconfig, credentials):
    """Upload one Timeseries record; errors are logged, never raised."""
    logger.debug("realTimeData is %s" % time_series_data)
    timeseriesClient = TimeSeriesClient(rest_client_config=clientconfig, mindsphere_credentials=credentials)
    try:
        request = PutTimeseriesRequest(
            timeseries=[time_series_data],
            entity=mindsphereput["entityId"],
            propertysetname=mindsphereput["propertyName"])
        timeseriesClient.put_timeseries(request)
        logger.debug("put realTimeData success")
    except MindsphereError as err:
        logger.error("put realTimeData is err(%s)" % err.message)


def midnspherePayload(message, mindsphereput, enableOfflinePut, clientconfig, credentials):
    """Build a Timeseries record from *message* and upload it if non-empty."""
    # Renamed local (was `time`) so it no longer shadows the `time` module.
    utc_time = timestamp2Utc(message["timestamp"])
    time_series_data = Timeseries()
    time_series_data.time = utc_time
    time_series_data.fields = upload_message(message, mindsphereput, enableOfflinePut)
    if time_series_data.fields:
        MindSpherePut(time_series_data, mindsphereput, clientconfig, credentials)
# mindsphere_function.py -- MQTT-driven forwarding service to MindSphere.
import os, json, signal, random, string, logging, libevent
from common.MobiusAPI import MobiusPi
from common.MQClient import MQClientLibevent
from common.Logger import logger
import common.InternalTopic as InternalTopic
from .mindspherePut import init_clientconfig, init_credentials, midnspherePayload


class MindSphereFunction:
    """Subscribes to measurement-group topics and forwards them to MindSphere."""

    def __init__(self, service_id, config_file):
        self.base = libevent.Base()
        self.service_id = service_id
        self.config_file = config_file
        self.mindsphere_cfg = None
        # Topics that deliver measurement groups (periodic and on-change).
        self.group_topic = [InternalTopic.EVENT_BUS_NORTH_MEASURES,
                            InternalTopic.EVENT_BUS_NORTH_MEASURES_ONCHANGE]
        self.clientconfig = None
        self.credentials = None
        self.mobiuspi = MobiusPi()
        # Random suffix keeps concurrent client ids unique on the broker.
        self._client_id = "mindsphere@" + "".join(random.sample(string.ascii_letters + string.digits, 8))
        self.mqclient = None
        self._init_mqtt_channel()
        self.sig_int = libevent.Signal(self.base, signal.SIGINT, self._on_signal_handler)
        self.sig_int.add()
        self.sig_term = libevent.Signal(self.base, signal.SIGTERM, self._on_signal_handler)
        self.sig_term.add()

    def _init_mqtt_channel(self):
        """Create, configure and connect the local MQTT client."""
        self.mqclient = MQClientLibevent(self.base, self._client_id)
        self.mqclient._on_connected = self.on_connected
        # BUG FIX: decompiled source read `model_name[None[:2]]` /
        # `model_name[None[:3]]`, which raises TypeError at runtime; the intent
        # is a model-prefix test ("EC" / "HEC" series need explicit credentials).
        if self.mobiuspi.model_name.startswith(("EC", "HEC")):
            self.mqclient.set_mq_info(self._client_id, target_username="admin", target_passwd="admin")
        self.mqclient.init_mqclient()
        self.mqclient.connect()

    def sub_topic(self, cfg):
        """Subscribe to the ping topic and every configured group topic."""
        self.mqclient.add_sub(InternalTopic.EVENT_BUS_SYSTEM_SERVICE_STATUS_PING, self.ping_callback)
        for mindsphere in cfg["mindspheres"]:
            if mindsphere is None or mindsphere["enable"] == 0:
                continue
            for mindsphereput in mindsphere["mindsphereputs"]:
                for group in mindsphereput["groups"]:
                    self.sub_group_topic(group)

    def sub_group_topic(self, group_name):
        """Subscribe to both measure topics (periodic + on-change) for a group."""
        for group_topic in self.group_topic:
            self.mqclient.add_sub(group_topic.format(groupName=group_name), self.group_callback)

    def ping_callback(self, topic, payload):
        """Answer a service-status ping with a pong carrying our service id."""
        try:
            payload = payload if not isinstance(payload, (str, bytes)) else json.loads(payload)
        except Exception:
            # Keep the raw payload when it is not valid JSON.
            pass
        try:
            if "message" in payload and payload["message"] == "ping":
                resp_topic = InternalTopic.EVENT_BUS_SYSTEM_SERVICE_STATUS_PONG
                payload = {'message': "pong", 'ServiceId': self.service_id}
                logger.debug("MindSphere response service status topic:%s, payload:%s" % (resp_topic, payload))
                self.mqclient.publish(resp_topic, json.dumps(payload))
        except Exception as error:
            logger.error("MindSphere response service status failed(%s)" % error)

    def group_callback(self, topic, payload):
        """Forward a measurement-group message to every matching put config."""
        try:
            payload = payload if not isinstance(payload, (str, bytes)) else json.loads(payload)
        except Exception:
            pass
        for mindsphere in self.mindsphere_cfg:
            if mindsphere is None or mindsphere["enable"] == 0:
                continue
            for mindsphereput in mindsphere["mindsphereputs"]:
                if payload["group"] in mindsphereput["groups"]:
                    midnspherePayload(payload, mindsphereput, mindsphere["args"]["enableOfflinePut"], self.clientconfig, self.credentials)

    def on_connected(self, client):
        logger.info("local mqtt connected ...")

    def _on_signal_handler(self, evt, fd, userdata):
        """Leave the libevent loop on SIGINT/SIGTERM so main() can clean up."""
        logger.info("Received SIGINT/SIGTERM, MindSphere exit...")
        self.base.loopexit(0)

    def _config_loader(self):
        """Rename the supplied config file to MindSphere.json and parse it."""
        logger.info("Loaded config files: {}".format(self.config_file))
        config_file = os.path.dirname(self.config_file) + "/MindSphere.json"
        if os.path.exists(self.config_file):
            os.rename(self.config_file, config_file)
        # utf-8-sig tolerates a BOM written by Windows editors.
        with open(config_file, "r", encoding="utf-8-sig") as load_fd:
            load_cfg = json.load(load_fd)
        return load_cfg

    def _run_mindsphereput(self, cfg):
        """Apply the log level, prime client config/credentials, run the loop."""
        upper_level = cfg["misc"]["logLvl"].upper()
        if upper_level == "DEBUG":
            logger.set_level(logging.DEBUG)
        elif upper_level == "INFO":
            logger.set_level(logging.INFO)
        elif upper_level in ("WARN", "WARNING"):
            logger.set_level(logging.WARN)
        elif upper_level == "ERROR":
            logger.set_level(logging.ERROR)
        self.mindsphere_cfg = cfg["mindspheres"]
        for mindsphere in self.mindsphere_cfg:
            if mindsphere is None or mindsphere["enable"] == 0:
                continue
            self.clientconfig = init_clientconfig(mindsphere)
            self.credentials = init_credentials(mindsphere)
        self.base.loop()
"""
Drivers
Created on 2021/1/6
@author: Lius
"""
import argparse, sys, os
from quickfaas import QuickFunction

VAR_RUN_PYTHON_DS2_PATH = "/var/run/python/ds2"


def write_pid_to_file(service_id):
    """Record the current PID so a supervisor can locate this service."""
    if not os.path.exists(VAR_RUN_PYTHON_DS2_PATH):
        # BUG FIX: os.mkdir fails when /var/run/python is missing;
        # makedirs(exist_ok=True) creates the full path race-free.
        os.makedirs(VAR_RUN_PYTHON_DS2_PATH, exist_ok=True)
    pid_file_path = VAR_RUN_PYTHON_DS2_PATH + "/" + "qckfs.py-" + str(service_id)
    if os.path.exists(pid_file_path):
        os.remove(pid_file_path)
    with open(pid_file_path, "w") as f:
        f.write(str(os.getpid()))


def delete_pid_to_file(service_id):
    """Remove the PID file written by write_pid_to_file()."""
    pid_file_path = VAR_RUN_PYTHON_DS2_PATH + "/" + "qckfs.py-" + service_id
    if os.path.exists(pid_file_path):
        os.remove(pid_file_path)


def main(argv=sys.argv):
    """Parse -s/-c, run the QuickFunction service, then clean up the PID file."""
    ap = argparse.ArgumentParser(description="Drivers Server")
    ap.add_argument("-s", "--service_id", action="store", required=True, help="Service id.")
    ap.add_argument("-c", "--config", action="store", required=True, help="Config file path.")
    # BUG FIX: decompiled `argv[1[:None]]` raises TypeError; intent is argv[1:].
    args = ap.parse_args(argv[1:])
    write_pid_to_file(int(args.service_id))
    qckfs = QuickFunction(args.service_id, args.config)
    cfg = qckfs._config_loader()
    qckfs._run_faas(cfg)
    delete_pid_to_file(str(args.service_id))


if __name__ == "__main__":
    main()
"""
LWTSDB
Created on 2023/01/09
@author: Licj
"""
import time, json, uuid
from .transport import mqtt_transport
import common.InternalTopic as InternalTopic


def _to_epoch(stamp):
    """Parse 'YYYY-mm-dd HH:MM:SS' into integer epoch seconds."""
    return int(time.mktime(time.strptime(stamp, "%Y-%m-%d %H:%M:%S")))


def insert_request(table_name, insert_data, noack=0, callback=None, userdata=None, timeout=30):
    """Insert *insert_data* into *table_name*; waits for an ack unless *noack*."""
    operation_id = str(uuid.uuid1())
    topic = InternalTopic.LWTSDB_INSERT_REQUEST.format(table_name=table_name, operation_id=operation_id)
    payload = {'noack': noack, 'data': insert_data}
    if not noack:
        userargs = {'type': "insert_request", 'callback': callback, 'userdata': userdata}
        mqtt_transport.async_publish_message(topic, json.dumps(payload), 0, operation_id, timeout, InternalTopic.LWTSDB_INSERT_RESPONSE, userargs)
    else:
        # Fire-and-forget: no response subscription is registered.
        mqtt_transport.publish(topic, json.dumps(payload))


def query_request(table_name, start_time=None, end_time=None, filter=None, limit=1000, offest=0, callback=None, userdata=None, timeout=30):
    """Query rows from *table_name*; results arrive through *callback*.

    ``offest`` keeps the original (misspelled) keyword name for backward
    compatibility with existing callers and with the wire protocol key.
    """
    operation_id = str(uuid.uuid1())
    topic = InternalTopic.LWTSDB_QUERY_REQUEST.format(table_name=table_name, operation_id=operation_id)
    payload = dict()
    if start_time:
        payload["start_time"] = _to_epoch(start_time)
    if end_time:
        payload["end_time"] = _to_epoch(end_time)
    if filter:
        payload["filter"] = filter
    payload["limit"] = limit
    payload["offest"] = offest
    userargs = {'type': "query_request", 'callback': callback, 'userdata': userdata}
    mqtt_transport.async_publish_message(topic, json.dumps(payload), 0, operation_id, timeout, InternalTopic.LWTSDB_QUERY_RESPONSE, userargs)


def remove_request(table_name, start_time=None, end_time=None, noack=0, callback=None, userdata=None, timeout=30):
    """Delete rows in [start_time, end_time] from *table_name*.

    BUG FIX: the decompiled source used ``elif end_time``, so a bounded
    range (both start and end given) silently dropped the end bound; both
    bounds are now applied independently, matching query_request.
    """
    operation_id = str(uuid.uuid1())
    topic = InternalTopic.LWTSDB_REMOVE_REQUEST.format(table_name=table_name, operation_id=operation_id)
    payload = dict()
    if start_time:
        payload["start_time"] = _to_epoch(start_time)
    if end_time:
        payload["end_time"] = _to_epoch(end_time)
    payload["noack"] = noack
    if not noack:
        userargs = {'type': "remove_request", 'callback': callback, 'userdata': userdata}
        mqtt_transport.async_publish_message(topic, json.dumps(payload), 0, operation_id, timeout, InternalTopic.LWTSDB_REMOVE_RESPONSE, userargs)
    else:
        mqtt_transport.publish(topic, json.dumps(payload))
class LwTimerSched(object):
    """Minimal cooperative timer scheduler driven by /proc/uptime ticks."""

    def __init__(self, threadpool):
        self.running = True
        self.executor = threadpool
        self._jobs = collections.OrderedDict()

    def _get_uptime(self):
        # Seconds since boot; monotonic, so immune to wall-clock changes.
        with open("/proc/uptime", "r", encoding="utf-8") as fp:
            uptime_text = fp.read()
        return float(uptime_text.split(" ")[0])

    def shutdown(self, wait=True):
        """Drop every registered job and stop the scheduling loop."""
        for job_id, _ in list(self._jobs.items()):
            del self._jobs[job_id]
        self.running = False

    def start(self):
        """Run the scheduling loop until shutdown() flips ``running``."""
        wait_seconds = 0.1
        while self.running:
            if wait_seconds < 0.1:
                wait_seconds = 0.1
            # select() on empty fd sets acts as an interruptible sleep.
            select.select([], [], [], wait_seconds)
            tm = self._get_uptime()
            for job_id, _ in list(self._jobs.items()):
                tmr = self._jobs[job_id]
                with tmr.lock:
                    due_at = tmr.last_shot_time + tmr.seconds
                    if not tm >= due_at or tmr.state == "shot" or tmr.state == "doing":
                        # NOTE(review): control flow reconstructed from decompiled
                        # output — this warns for any non-fireable interval timer,
                        # not only overdue ones; verify against the original.
                        if tmr.job_type != "oneshot":
                            if tmr.id != "future_monitor":
                                logger.warn("timer %s is shot at %f, but the preious job is still in progress." % (
                                    tmr.id, time.time()))
                    else:
                        if tmr.state == "pending":
                            try:
                                tmr.future = self.executor.submit(tmr.job_handler)
                                tmr.state = "shot"
                            except Exception as e:
                                logger.error("Error: %s" % e.__str__())
                        tmr.last_shot_time = tm
                        tmr.shot_times += 1
            # Aim the next wake-up roughly one second after this pass started.
            tm2 = self._get_uptime()
            wait_seconds = 1.0 - (tm2 - tm)

    def add_job(self, func, job_type, args=None, seconds=1, id='tmr', replace_existing=True):
        """Register *func* as an "interval" or "oneshot" job under *id*."""
        timer = LwTimer(func, job_type, args, seconds, id)
        timer.state = "pending"
        timer.last_shot_time = self._get_uptime()
        self._jobs[id] = timer

    def remove_job(self, id, wait=True):
        """Forget the job registered under *id*; silently ignores unknown ids."""
        if id in self._jobs:
            del self._jobs[id]

    def pause_job(self, id=None):
        # Not implemented; kept for interface compatibility.
        pass

    def resume_job(self, id=None):
        # Not implemented; kept for interface compatibility.
        pass
def Download(controllerName, fileName, callback=None, timeout=30):
    """Ask the OPC-UA driver on *controllerName* to send back *fileName*.

    Publishes asynchronously; *callback* receives the response. Returns
    the generated request id.
    """
    requestId = str(uuid.uuid1())
    message = {
        'type': "Download",
        'controllerName': str(controllerName),
        'fileName': str(fileName),
        'task_id': requestId,
    }
    userargs = {
        "type": "Download",
        "callback": callback,
        "userdata": fileName,
    }
    response_topic = InternalTopic.EVENT_BUS_SOUTH_OPCUA_METHOD + "/response"
    mqtt_transport.async_publish_message(
        InternalTopic.EVENT_BUS_SOUTH_OPCUA_METHOD, json.dumps(message), 0,
        requestId, timeout, response_topic, userargs)
    return requestId


def Upload(controllerName, fileName, fileContent, callback=None, timeout=30):
    """Send *fileContent* as *fileName* to the OPC-UA driver on *controllerName*.

    The content is base64-encoded for transport (strings are first encoded
    as UTF-8). Returns the generated request id.
    """
    requestId = str(uuid.uuid1())
    raw = fileContent if isinstance(fileContent, bytes) else fileContent.encode("utf-8")
    filedata = str(base64.b64encode(raw), "utf-8")
    message = {
        'type': "Upload",
        'controllerName': str(controllerName),
        'fileName': str(fileName),
        'fileContent': filedata,
        'task_id': requestId,
    }
    userargs = {
        "type": "Upload",
        "callback": callback,
        "userdata": requestId,
    }
    response_topic = InternalTopic.EVENT_BUS_SOUTH_OPCUA_METHOD + "/response"
    mqtt_transport.async_publish_message(
        InternalTopic.EVENT_BUS_SOUTH_OPCUA_METHOD, json.dumps(message), 0,
        requestId, timeout, response_topic, userargs)
    return requestId
385 bytes +""" +Drivers +Created on 2021/1/4 +@author: Lius +""" +from quickfaas.quick_function import QuickFunction +from quickfaas import global_dict +from quickfaas import messagebus +from quickfaas import remotebus +__all__ = [ + "QuickFunction", "global_dict", "remotebus", "messagebus"] diff --git a/APPS_UNCOMPILED/src/quickfaas/__pycache__/__init__.cpython-312.pyc b/APPS_UNCOMPILED/src/quickfaas/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9d56362a518cd99ef3a32b3dffa78dfb3e187278 GIT binary patch literal 472 zcmX|6&riZI7;VR3FvNKD1|EqC1=Ncn1_KHth%*KcyEJ1R*to6Qt|oXh{t@22`M3BV zEaBwIn@YTRFn06Dm-O|$@9Xzo`cx{JK$U*?HM2DUo)hs`N~g7KtLq*RU;_e)=EFd< zHJI>>p9yqZPr0sm*3PCpt9Z`NK`;X(_W*5UZdCI(n`W2OYmW=F!#yYRNQH%!X1&=! z4YX~x9T}Z7zFQemDa@B7xeDXbiKOnup$y%KvQRaOGoMWy9~0_EN}7QugfsIdQYcm6 zUcjPM=scqwxSl{Rfvkd2H6I%o6Xs$ZuU{qBQ^ygtl;P=bO1c>r{>MKi$$sjTI5@tK z@grEO9{4;%W1)V9Lh3TqWv&doFcPRoqrRM=zJ&*2-w6pG^mgh^v~O9bcs%MHA6bLp zK^F<`qW>s_F%4;iaqV&*=UV|I()afGs)~W?h_Zs$421A)O^5o23E8B`kRHKhe#`Uf<)haO7F-_Z6@kweVj970dNH8>E+A>_?Uipj^J=RPs-y?y)i zK7X@+>Uu^J^zF;W@2VvI6F&@@Je2zvLD`b9RF^QqatqZ_Eake46|A-tU#+WX@Gjve z>q*cxoVbhX8cyLP$W%u*(i^jx1>)Ut3C+wC*JduB34@vZT>iWXzkKGR9kKP0d@-}^ zMKrS~l9%@oa0BtIf15Fj5j&vmkT7Rdns7?OQQ++V0c=ZR;x92VVU4K?t-Vd#L&Nsa ze^c(tU1;Jbc~fE9$h(;uX$O)GMh1rFp#$qkW8)*Od3dB9$gVQV2ieds^7gQ@O|`3% zL|4Vi32?4T9o0yb4Vi1qrnf8)^Q037YhL5N%v0_iH(dkMz|V9;FS>kJQ0cb6zv3#)sAkqJi2cAE~R$E<$5V#E(z>bEo^%ZKUug^DA%m7 zR;yO^TD4YKu~sV!mudxT{&IP-ROD)th5=9D$Zu04LBy-`Lg0zT5a_E6PZVXyHS)2LX|7oo4js!HR4*Fm z>DM2n;SF;_-w%UxZ(ON(9x+1W^6yJ={6SNx3x0^Bmir|+1@Xkkh~|L(t007)9#)b1 ZJR_kakEJt@rP+N|LsQSDG;|~m@Cqr6JKz8S literal 0 HcmV?d00001 diff --git a/APPS_UNCOMPILED/src/quickfaas/__pycache__/controller.cpython-312.pyc b/APPS_UNCOMPILED/src/quickfaas/__pycache__/controller.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7aac1a88c25c7cae301dbcd7cb8a3387c7cb8200 GIT binary patch literal 3043 zcmcImOKclO7@paOD4YTV& 
zkX@?MLlBif;!qF=f)Em@5`qijScfAQn*`!6K|+EHx0G-|MMC_u_GA-U!38$jfBu>I z=byJyxi{L|6M?@T_vu8=cwQsN<&jiQcmF~&kA<=6$Y?V&D)|LNAG-XEY9y)89uXWK z-c3J@-asytN2JTxieM)!;5<&#xW7Gyzy|WD8Qhz-l^EsmB(n3^ap^j?!fx3vZ1p7S zM23^icOi1Lz`2m&VIygYd3LMdJok!Y`w0|O;4X7nXD8eT^Hq4FPm4{VBPHI_$Xqk*kWZg&@IbEJkq%x|q_aQwTt}}&9N;gbLVs1{&DyDyM zeC)!L<0B(yo}7$NUWi?|IB7cO6PcWFXmFo7p4;lJ!0?HJ-1-G0T;Cmi63^OuYL{86|^CEME`A1 zaJ@P7UgP5ViqyC!h00QBwKe+LgXPw~Rq14)wXdz{6{S1LeShl|W?08!b7b zzpr}&zamleZ*YRkzb^UT9C>}D)O_O0M7jCYs&x8CUtlqI+gJbl29N54mV5VY{IkdV z#*r(de{6VRfZ3>a1?^bJ`rCP<2ZKE_1@jQo z3Z_z=$1m)Kvf%4do=b8VO(Fp;AP_VXfZ0zFNRG!Yr@rG7tox=H#-yiu1x@kk6v&Ic zDt>7pZpC&$>`@xu2IdO-zOjALUGW6pnl5`nOKmHjC`iONQs|C|c6V$-2)aK(KfQ;b z44BpbseldoKU+^)V4mF?@ByekjAFDN_ah<83soDfQI{_W_9q7D$lt8hyCCL8(gpMq z6`odh9uW`o70#-)pm2RKqX=w#M~qO55v&+7FIe($GB2dJ2wgii%^|Y0gFK9H$pg1d z9ym)Lwj{uk2M5T*7M;^nzyJ0qj=bX@d2n2I)Z_uY9r6HuBgj&)YFQsh8fnWz-58M} z%ydr>P01yVC)5Q~NRS!bbPuWXsiZnawSz*gDj*(0c}-|)(nt-}53-vY3L?@(%}xm+ zw0z;Zs)97nWVB0(%r+3})SphxkO%3~c4}I{m^=(um4qsnr6f#CTBw$!vie+(Y)iyG zfRs`F@ah=gNF^l*lG63ahi5)MQ0_Wfa<<>2FVWaqbhsQH{w{j1EIqn7{!V<|zpu1^ z=$5wPe`;~?w%1>2Xj?kCH2=Ygazl6J&|_aZK7ZntR6aEN*7(v$IneWO)Av`aden0s z-wFKBa&-hsaW&RE(141E_kdll$J92s1_S(ZsA=FBw|rO_^zzHc#DRWsxt|CBE3XK% zVLTR=%({47(US4Fb?4@;q&7RNWxGeL-=)>xK#tl4TF9W6T?GXNv8C@-WYB9NR9{FZ zHP%PL**6QQ%c%XN8^++eklq7kgU1-(@d;SibR#VNfaD+0?%ToUx1TB@|EA!@{>`p> R?EiHyk6V9}ny}wW`4^pYqHO>G literal 0 HcmV?d00001 diff --git a/APPS_UNCOMPILED/src/quickfaas/__pycache__/quick_function.cpython-312.pyc b/APPS_UNCOMPILED/src/quickfaas/__pycache__/quick_function.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f2a7a9251bd60951ee5f62c477821ff8f35fa879 GIT binary patch literal 7836 zcmc&ZZERCncJJBG-*ys$ah$|}LqdoXl8_L{zzpdC2}vfwFiH6G%=Fdc=L8#ng!`T+ zka1>Ei_}o13#?j^s4Eeqk;sUVmiaYQsw&j!54ua$m^iZaY9yrg&-`OyBvw?add_`* zc49Ki&i?6zoO|xqdH0@szV7jFZ8kFlMf+F(JMsAzhWRgSs704YUj7DSGeZn~$*_tjk1cq*q`%)KP^S&)e4qa^5;f`U69b9e6P>}c<3-)nx~ 
zpODVS$wBvUC?T4Ez?EJPE9)ciiHRWjfrU0S&7*xIrw5Pr@q@k4syuqaJ2rA+c(`w5 zlph^CJa%dndds30C5e}aKPFDbi3B~CV}AeWfIlWgf&`isycm@DiAelYe}wmw2@(3N z!&76SXpoEs&Ibi(SB{;BO$sOD@krmLU?3sI$#hHmXMWO760!El%dpp2yC8P92jVe_ z#3Mji`YRHky1f5ei%w_xsmP4JcevV4h{n%Wcs9YY>1S0~gQuG_%Y& z2*6dNPdm$8Vtw2w@M3uwe%fA5d_v|f3DvV0%=a?k(w;+>A zHpC<~U$MVnSw=FzbXvkj6)V;PdoNEV$?&SoGM8af-X8>*=F!VKF)x7n|MAD@rxqQiyV-0yWLk_ zf82gb1ctZALVw_PNG_QFjl7(bqf1*A|s);4*WY=X~ByMKbUZR$yCVV39t@cS!F&Qz8zZ(QP1 zru`4>%f^9+jwi-}T&?@|y9@6wiYv7(v)roD`4T2G9vw0x=8pz}8D^f3#)U)#aVrmO z@JEypx(C^oKYAb-iSWFKB~JV%^8I~M4UqDd0&I!M>EB;vUU3@T?&k~uQlm~IEG@gP;d!yln6oW!r}+#>es;3*osc%v|(mNafXUPZ-J`X$1r?hm9$yy{Ws?pjO&xWZ&>8fLk<@@#G6ESEMqbJe?`S(dGDoipEf zFJ*M+YC9iXOxGUHkM3L)GL8FFjr&)syOoK%?)Kd2ndPpRT`NoD^u|T$?iY8yxG#P8 z#kXHPlrsHiQvGLEYR^&vS)(;$+?_J+UNyE+ompo;QBLs;FaHU^TI(5xc4?6c9pQp) zEp*}i7Now|tNKZ>xdrWb3+)Z0^lh|3c33aiSiN#jvVL^1f6zN7Z!5h}Wt>+p+yL{& z`bLg}`N4~&=0~|qdD9!WKfS&5YD>hU6-jgg0HbLvyKcK?`@8X5(VNl5vCPha)Xssl zaj@7QyBWia9ZWS2t{Oj3ZYHlalzRnh6Qk^f>T%=B?7&}qBmbW%w zvZ(QaZODea;I6Yf5cB2jlPcHK+e#&Mz1(-e2E?lX6hXCIvn<rJDC0!oe2 zKmX~XzoD3`7p>M{lnU-|Bf$+9#0YkjFB>b0N>R|DK+UXi^c&JGICYgOb≠rb?H% zrGhH{@y;C#K;BruNsI0so=^h!jZS^m>|+<3Td*D$%TUqZrbO z3jo&WRcprHm9lr;AIt3POYQ4hu^&qt`*RNGt%;ixx8gVB1@G?YUOqUKatzH{au(b5 z?rYuGd#?4&H)Lv>Q#H+LOH0mLc75>L;0+<;XiPa8mn>;(SI)NSM$5do^wFyAz=Jcn zs@hwZZ(d%kTB-8P4!qJbWe1;?SIw5c(y7>M5kts->2w^e)P7&7JL)p5i*S+0RPmYt z!3ejP`2_6ej5Z1HaSX$gnhRjOM0U=a)CSknm@TbTcu|D$`B+ktg%{NfS8BGt=|Q_l9DtOSIu?7Y4t)y{+nm;n?R|hWC=9YlO_$qCVJw$hRqn0 z#w+aHwxm(0#C~Bjl?irk*IV{pQQXB(>>UH^l9DtejYTIkcaHAf{{OIh`&)P4Ub1y) zzPWn|^A)s&@dZ&*!uv`(pedpEqeb1pjOi^@4yY@Nb<(7+7M&8tg|vhZs#viWwj@pK zqvc{qQQV8*5USScs5xm4A1YE&u{E08qxKbR!6_Y9ThdV#3m;LjVy)_qdJ3WXSA}3j z?Za0}NzlTCdy6zwEZnbR#agA0HncR%xuKbBP{N#gMygTQG%y76&JwyOT2u-%+&KnT z9Gc39My1r|hVmg(jTbV*{!fDuQRbo{F+lUg!=FWDU2or!Q~feG=sk8q=FS`*@yZ5- zgWi5w*Ece9VuT>42{M%Y8G>O1UI4P`qMDwStrNjmkc0x5rOwpCXn+CGfk>F>+2!>!26AcD znF@ER!u?g%UDqAgYDL?#&D$0mpKNYg8hUWx$?o2qy(VLCNZA{{I&=5qJ0GvwJD*kU 
zSe$rL)tYl|%Q%}-&Zcjwe!cbUt*g%N=Nwnvk*nL8ZanlreDu+3-MQsYgj`$auR~vl z9vD{I-e0spn7qTszBDttI_J&V!$WCj^U}fl-H?QrQHYSIoQ^1%YtRmn|AKaI$g>Z zGtMI^=aGk9tImPvI;QU6b0bsNywsX@zcbHe-3=Lccgmgr&_n6bP^$mziu>HWF1x*9 zVFtkB*@Z9f*FD+3H|uW7xpv$hS{PcgrCs~8E_cSYJLTG)arLEKeGi34eXFjK=QgJ9 z!1D_HLVMbM@K63idD`{PnlJRHT>X!@N5NIs7=Gd1A1j#7uJ5Y8t@>{Jx7*W)Pp@>G zS+=+3YPa6*Ug*Bvv(U5L+?%fL%QZl z#@>{&H?7*6Uk$Q&ovFHW|MTD5n_e(1Tz_-hy~ckSdaech^{e+;T((?2^jbs>{Jy+$ zxRLq3&M{oA{l41>_;2 z|DQ7Mr_7G0O!HG_C;jhv%G5n&I7%Q6rR~1|Ln&3u^jSq00%dfER}3xl>-6My0J-&9LL0QMZ;mW+1;X5O25 z?|W~5j6?)P$A|w^9&rf$=702o>95BtAnYRzZ6Xb8OcmFdO$Jk$t+F+4lLML4cmpdu zgY>{b-%JT=fsfE;NDBgnwGg0CXOrQ^6(K{6yE?IjC88^iuEi}go|;cBNDESGUbwEf z&K-*^#`A`23x7~=N$drxR;8kof+yHIxofDpY-q+Yi;&vC&R^)0L5GfG0Q<=Cl@v5Q z1z18!ra&N0>QN|V*Kg*QvT`o-H9Dq;PTrQv;N6x?L$#!grMflUbZlwaa8}$cX{9LV z%oWAdA^ESBVQVQFo(n9FA~l1)_UJ=aj*y{#y=bpFh_ow_$!(H;40Q$F7$ zH2+bUb_Ae>eiz3-uXe;ln@f-oXjBX^7(57mcwd09kHQxMY$6)qAj|}E2JE3rh)8=F z&QfcCYzGF?`KmEZ+yMPS|32Up?cw*a8A|k}duW%vh3?^7{qOofj|V$Rw%~>Pq2;v3 z?6U5})vD!can&+SU3Cl~+IAGjwc|P=7MY9B+DXovNM{QhS^12Q!W(PyYPwkR!r5I_ z-=SuNhVAjbRg&=p->qyaHQmFyO=FG6Ntm9A(M&596YxToX@)0Y+8Y2Z)Z>DDv^#S7 zL8CL0diZK*aXzb(klhLuK zJr<3P`{K!$`0t(wCMWg`IQ~E!MxTb$0rvjnRpud=#_YHJEBtpX2tQyR^v|$K>(FP@ z1ObZmjOpeX3wzk1C;Tz!t3Ae{mj3uZCbwxC{xh1m$$Wzk`ZeLV0c3GlsX%>~PWvgR zc|%aaXJ1L4(4_hG=|ys;qZ3o9Zdf~p>djDk*wFS=KC7mc58-)oPYGm-qiIr!^Mq>z;|2JO>vN+BP1i8LC5Qv-+tyO%i<= z2~wavds>L0*J_sLR`u7(3!tOq+E)R3EXMfHQ4Vu|2?)>pg0B2LJbgdYj?A=#C)3Y= Yb-9fujyMKiIugP-dOXSE2KK%F3--cZ7ytkO literal 0 HcmV?d00001 diff --git a/APPS_UNCOMPILED/src/quickfaas/__pycache__/transport.cpython-312.pyc b/APPS_UNCOMPILED/src/quickfaas/__pycache__/transport.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2d4b753ab5f41bb9ef8ebb7d30b47271fe76ce45 GIT binary patch literal 31661 zcmeHw32+tw-L z0mL$N@_R7=)OuZdY}Cx75@ffMI%?lsB*;?j?+vIl*?nRj2Z6vOfHnPFnJJK7pXTCV+A9Wvmjv_ zIygEy%6g^(I2>MldGXRh1i$0MW%zQ4h4YkK%;TS=(j-s#z7)Zhajczj1@&UWIKh4K 
ziN6Pp;`4a67)#z$a?Qz9F&2UdqY^RfBcWfI3VoJZ4O+$W;{{?Yc^3x`YDhgH?`0Cp zeBt_1|4IFv=dLfkA*rAH-1YOM8r5Pf%+g;|c6aP@f@nI!hCV;7Q;h z3~+<#iG)7jJ?Ud+Cwy!|#RM7?>hWp+R6+yf!AwskGy%`#%!Ci_LDoNYB4P3bynerX z!WRqzCrW4-{|SFEkjQd-C;Yytpxe(R^gMJdtsb2Ao=W7or>ETB>8UB77r2&t0@Lh} zH2Hckn38@HTEt#hu{h%oWYj1TYouj~mCRUa(iaGLPWbp1fIIRnh^eg4>jVDDxWzho zeE!Vru?c_RBwtGn(O{y)?F$4we6sH}z8C8Y%(#QoGk$L%p=SKvV8RlZoAPFsOW5U7 zdC*pmTwbC`Hkq%I@AeW_FANnK;)F5nNh=J~RYgJ_@J$>KAR}-*d9mSe06tK|l;1nu z&^PU!orJdtH0<^V_skw^*mJYhY*v=7d=ScL(%dsO~4*qs}!@lE@#uE|eNv`N5RLNP{ zxcPhaAJ#`UHX*#SxN{5MH?36FfA7@~UyW3?;^W46-B!FiR~*joWq+6*agfK-_|_Io z-jV@irEcp-1AO_)n(vTU`HHjY`x~xqh&bCZ1K>Nqa|B}>S8AKTUw^eeQrm%#)$tNX ztfYl2X^E6|<=dW=|e26I&)k-O- zpr{N1D`ylUix1d>qU@#4sphG7fl1C&BI8d&bxXCAX>!8Ht7Jf=F_Z{jL5)xqR3jBo zoB>7YDD(rF$Q!Do)ER|SIh@eLhX<;d^7@<_76nWqTT0>4ReRDC@C1Wwf)32i03~bP z?kV3Hx0~GxrJd@8$~ytn<6%z#<2ZGO-1TnY9G*bH?GC7+ki+44i>-qQ+X6wznu%fr z+!n|RbVBg?ThxlFC}wg-OwN`3qD#$}4bl9%mD+}@)mIy?S43*Nf7cW>SFhwdV)?aP ze(hr|m0b(P@0grV0tNu^^%j>|b;C@%?Ao+%h9gMSfEA#ox2d;vZ|mRAdfV`}@om%F z+3*D=B|z>A!WR?rQ@`Z6^z8t)aGj&%^d&|a#GxRU+;{s}C90YWil8K@5_Y_k zy+|!)A6J5|rCU%6aT?+()VLPZj21!~RoMj%=w8%2hD8HtX<1*@y^{sH7xj*QQNNf4 zX)4gcbnoa;2Ybh~Xo51?@N8VvEgBb11TN}ex&`gLa$ezRjK;YoVOdBu3CST1>I%uH?)hf}`IhGZEr#^fcx&AbA%!TgXX3y_Xn zyjtxDp7c2o?;N~Hb4(u_2f^h4Zs`C-^`Dq>cqS$sAk0wOuyYO~ARK~Fad;u~n8$l6 zP%ps^H4(@l5Jiy3r&))AYGFzu8Y!UEMfNu^IM8I+XKaY@Nc!V&wFgm#oVf8dP3H`nE-pqg!v6dcDH zfAFLORE*h)Am9b#>u`K|A(SU#2u$;id;AkVrashkc*^(24AG+;K9)r^0(?X45w#r& zsHo;Rvsh$h37YX8n>~@x0PanCf-H*Zgcg|a6x3#m@JZkQhSxU}L>q^#}PzO)?1MFr5X+)L8ea7cG#rlp1 zaDGH6?Jb_kC$gZ|LWe`G8N7b2=2PDZs-tHj&|RObwgW&0K7asZ!M*&lZ*(vAtr+v( z8$34{wztRZyEyx<<-CZ!FKXKCOVP#Q3&Y`x?RPD0pB5I!3R~|K zwyrAGc9(((_YSV4BU;k=*hJ+y!?`t|Kgy;Y&Ignd@>fbWetYo!!Laj{XvwR>!^ifR zvx{?fMVvc-RkCZ9#&S;{=kS%E1Zts!Kd$Okscw{+U2V!6Z7#Z(qHlI7A?fB$1;lSr zDu~}w(Y;2k7r&QyNksiYlBj7B=@wKeNdCo%S|Y+E(wWiRgFt5UFiBOiLB_Xkfq$LrNY&n&=q_itjGAPYY$EA5^5{@{!$j)Z0z!!9&KoL2BTz!!VsQpt=bSJzkixWw<1OpcaJs9+2 
z(1*b;2tdv2$M^sSdob9G0TMR54}t*VCepJ=&L-$Wvh`5g^HFrNCN>lJJqXf?`h{~Z zgl#)wwjR#bv)mN14MdH*2~ls32cRD?W=SeIh@D6F3|P+( zN;VA2?3DC&q4wt+mXxYv0QO# zGMX7uJFC8nhIsgWxUd zKWuO=WySMLFHLg!b=M4${2j|#w>!99N5W&rBD;>oi>vNfs_xTNSuHnU7#x*32D;Y(2o`GkWw6RSvrO*W>$n&H;qatIiV$|fW=I_!Z zm7b?zT7VQxu6dfFv7E@VSe=+M4>j@I!d-(U7%hH1R>|n56pVgB6O`=hP$EmNO_)5Y z83Uu1uJMH%Z z2A=k?ZUR=rPXmZNhEfhF#o~J>R1>}_b|b0enVIoTF-|>LB2o2*dE)deLpowYJLU9L zz~?48S;@CZXoZ(xaY2KfBp{(x_BDtFP>(}|#!Jx6ixG=qHX3xFVf{g$duGB*te^-z zH1Ir_y*?Ztw*9vdfS_L~aeQlFaVWlF$} zSGg{C__ax{YbrbwS_NED^eFFB)a!~~B|P;j2k|zf9KpnX<*4!@MQu8)yp(mXxN4Pk6I{iPXjx~xsq^~FT+_bG+AEq5bK+HXS6=3-I<9-}RPB6!bor&Fft8{SSG3o( zTus~ZzT2m{z9V7gRCt2rm;f}eOF5v7msVbWl`CzzuTnU+J){)H+n0JjwUoy!HJqj9 zO8#BTmizgXW812VDr;J`QI_(s(ZLg@M0)Vzl6rp=9#YA29G){TkyQOas2?a76hN>d zfr3i6pbR35B$rrn9u_(Yp@8KF?}WURJOKh(13D4*NyrVDrdA*-9U&q82Pz=*3Xho(R!9F4+0IrCe z@=mii>BWNW9E2Df*L<##bH7+yV6(MExO~FG( zac55Qc%EQCLsr65EumEud_3tQ&rWCsB1mX=`+`LBXbuytDWRF34bIF46S}_XGgGKX zfwf2-oP*^6GV6RD;h;QYrx1?jm?z-d){@X4+t$MPP=10`qlrZ|p%t{x6!Ipbi^bY+ zVsILR*C0rdUvG(%QLFpdtbc-W^UG@@uL$jW9+H=X@H^Pv81QhAS@9gpd!cioaB0Jx zoQ8P5?PBMJ&dZH=@+*;+C!?D>FJ;B68?V(ytGh1kj+a$_`^|5?d2dT!wEyMsE5~B5 zOmOhmKN+6-GS@#3V&zT6UKC&Zlm}3Jy{R~ePb1*@0=F+IJ(xJ6Jccr@-mAQvm8jVA z!-{L;%O%{l;k!-y-6?KfscVYWb#ir` z*I!(IiR&7U)a}1Cw5kz`s;Dh3mxki`4oOU=vL9sWgn9KqM9}?VsstiW5~gzO3KtqJjBg>PRYCx|zrAdUUO^LwlgoErsk&yro4?~;aT&1uBFCx@ zzSvrZkBpU!*`GlKb$&(x;{|d4k{hp-Tp6~Uk}ENA5gJ9EP%Wr{1*&1GM_N*33xLI; zZkcf&G?3)86kQ2)q*{SZx9h4h1m`wtyWCX2-n^$+bo(w|JH z12Yr;AgCGNfM!{g>!(ovDLq+GL(6*kHs;l0D+}~FgPf6>te4TyZyB>V5Wxn}yJhp{iC&akJ=M#s zg{@74K1fbZ$w89(AUP~0^G;Js3eW;o<1%@C9~Lz5{yLm;OYf?k*NmeDE8_x4)zE{I zjAVFZe<)vLTxRWd+_APkqbB-Kmpt_GinTT5+kZ0<6G92oloH*>tT5paF?~qQft#vln}IBXIzX%o<{&D)6T0Q)TQ#X zIcO8!VrvEvc;r{u$D7vXur*{cu6b>ae9P_r>&Ji`e%y3w_(foaCL+C)-aMdzq5KcCprYhZsxVMY)xP1`B=uXRY0Xr@`~} z){un%&)wTOO6;xE2&+Dad|vc$bOP^5H0y&L_85Z+3`f{=$Q-p0#5cT zFdR?vssfH)!Ewa_3eKcI7;>yxaTMoJ0@zTN1hylgaE98Sxw5&QCuV5FnzqnTLsqb0 
zb+bOSWnIv6E|ty=DXUyWhr_uhQRLWih$ZxggMQHP{JucKFb>vO(C^*g<-?)?U{Pie z^C*j)pRK|QI;5Jv$WyA6YVT{CX*_FtdvoWMh>q);9^h zJ?`=8W9%+WMMowUrJUUItJ9QRb4nP=>_8YhehC(JQuYrp_!+TDE5%7*ILhGuR<^R>ZP z%O0*}Ph`VhFaeisyEY%&zL(p+_xAQk*$618_R^Srd&IsyUR-fG7^~jJ!C&#N_}&AT z`eG%mTuJM-lU&KpXvsjlWFvU~6}5%2?5@Wuh=Sr{&IUFym`1aY~I~#XYeh*w`V~%#t(SE%q>ga`(n4^PpbX;!> zJNj@4tEr;)FpifChp_!o4OvwG`f&IqCic>JB*@ZledwwLdS_ zV!YBt57p7Xa4MnDFE%SMUZ=u%13lEH{lyL!yuO5rNdWw7XRJ34n(Xdju;o_GseED)qq_Aeu`1fj~-lh2a5~JRy zcu=Cy?|h(9K!O;m^qU@KW$ViyII{F5|6sF0tcBLwA7#ViW7}5!ZpA7^LPkMttXnih zbsOT9jd(AQS2W<=5wB{-`^I?rW+)l|YED;uMRBD$s%u!$WupUn*t$Ea+Y`?z z3hRp2E;ga!jsG7W1H4IFY?4#*BvUpxcFLOR<@P&?D%HAmWUN8SI!`xai{;ko4a(V! zi%bqTr6^tIi~{hOv_+v?sXS~qTCBGayIPI8W zdDme-_;0b(lMdLyHw{KeKN9L3`l9gk1hqpB{}c~_-&<4fa7@keexGhWB~;QkJ>^69 z47t{v`V@-C$zmca*_DKTVtRVU9ROvK1%C=~>-WyGEN*(?L4zAVAK~4GaNfmlGT|+t zuuhD=$+NIMM%*WYdvVBWcgUGZi0c;pZwM9_17Q^naG6l!m!i6tVP{25XXkYGn68r3 zRl=@}l#~iiR}s}!tr#pZLj`B3h#0D_SYpl&&e;)j_HfRgJBA*~$!JH^*a@O}u`gyU z<&33KW0gcS&3APzYsm$T&V*c!0C_wQx#&}NaEM3?zWPr!RWnLCa!RfzrOY8Do0Oc* zYaG?Rj4LRSh6sf%0YsM}hVlH2s_@9DlG_XkS`AVG1yQIv z_(Gv#5dgoR05`XSua=_*wi7rLw1=7WPlfc2P220Y!oNmb{x~@eo88n{FjSi|#X5kz z3SG{@LCV3*;>JI5yF$IADliZm{JaUlcN`md*%$$ADo!w!r;#^=)s^Hf<%_UvOKVzSR^aa-NK*qEqPnfCN~6ADCBOI^9lT39@jh*h**ZB} z=k?KuttV>iUCGIP@65R~@0~q&_JgTt&X%~@5;K=`=JJ@ikux`5tG>QHYVIXIsvEi7 zjj`NXF1PlYVYxD#TN}yUz1Z_84^07B9{$u~TT)P7 z&O4-E?8)kNP`4cVz8cl7+M2!sRK$fw{6Gfsrg&$ZlUI88%wa8sTk$^=oxn3vt zK!J=%$^$uQ^f(IRjO$HFDND9Brvlz4$pqkS4GYRN=vf&+;E{o)wG7^+T2P)<65d5( zB8LLrMV`<9gukDWcvrD1hw00W3%{J^ctD5#iYD82lIm6%`$Cwf5hz$OS}IBd4QaaE}`6~(Rgm~|6p-4wGna@NL(wRuSi z{syL8UR8@3t2twJ)aYEvvBh$#xt!`qj`PZBY)dz{r8~A|fZH-~Cucz7f!$H_PS^{) zq~>|?U9!qSz6?IFc@7IFK51wl&Y1u7}Z=8OwmrIeylmt;bOwPQOOm)z<@3|iFrO%nm4*a`beL3_g~8?n(pgQZ+m4iAS)MKH)zpfad- zWZxic)POh~-=cz8sO}khoCw6nGnl$G{vl;%un!093_+Dnm%>GM!pGs&Hgu8U-!Fxs zAoFl)dA{)tEP5M*D;N+qau8#Lm*Cu5P=R?9&L@x%Dop1mQWFkjCw~Mb-lD*Wn_qTL zyQp03m8Oi%F-r$JMz$_jL@WbQ-EQD7`hqwZAq%Rxg6dd7BUjLPt?K$nxS%mo;5r97 
z*pljzS<=FKW7d6~b>Ho#+kuF6!MNXQkx1SF7v`|4_CTBYCf=@re%OC zPXefWe(Z2HQ8$|OJ+-PEowgpE@}@=IQ>D6TqkBr}o24p@SJ9B0w&z8n+R;?11()34 zj%r2v5bx+Z0ZPgjrdhEbq1W_Y$k8h;ORuV=vw#Q>CTHq3^h!UIURCpS2E775J4+LK zC9xEGCC@;we0q{zHK7->#L3s;A|-~IB$>ii+;!zX`%}bQYjQ^q(kZDAn|bS> zL6*r{0%6vt5oSvEbCISYrD7&=YEp<(jxG^1_|f6X@@M#Z_H?pL0pEy(`5}~%C(O+; zOA}{ly4D)8v`2LvnPl0_6*OP#j}&yQp~(K2b&#_T-aZsQI2y4YzNg7O614k%nAvstnY_`w>Z6 z08(c?7iwInPrBmEO#>K3(%nhc#R!_gOU}TPj`h!I2mBw=VJhk7l-!*Q>FQL+1BND) z4Kj`J1sR$i&*giS8lo(pSADDz-Bm;w&*+z4ZgK+$L1@-Wb2#P;{)y?pEx^1Il%{AP z-M24jfm`5LA=kLvWJ)`)O&>RK;($Xc&T3(LL+)~|@HrwM5uPOOk?G7ZrxT7!$qKbf zTxA6I={P*%dc5Pb-?Kjd2$fY-R5%3ND-^>cnjB~i=>VIH1M)%z2l%6kM0mnNJo|_6 z;_P=YKo;awB^?)u)P1IHN7#GEYXr4vj7P{?hv^tuw7T(10hui%XAlf#Okq$10XTE> zT~A>`s6c>5u^Gz{4S}p`-^NEdcBF$0ky-N$=@*caY1R>%=|N`1n{_}-$j*zIDkG*! z$rRl2=SO2*hq6n@!z_a!E5_eG7z;{_Xe z#YK|0*UC=HmmwLgjW6I}GA?T0U2RXaI zzwC#j#v_kQsqCt-sq%AJ=EMa^4bDF3l-UR2W90{02As zkbbLy?rqWDY9T4xX)M#J!t|YVpHAx)^lAV}QIaYARe0b%iQzC7l9e7XN^o9;gZ&sa zL&Fv&;=#L4NewBfg@gRGCx8k+Pz(OP`eDG}9sukAI`8fa8-HL^0oW_E;Q%7skjDC6 zp9K%wM7H2mAv}Nfzo2|5=MbOkzzsuuJp*rPM(Qqm^IiPE$!2tt37 zyN@=f%hIP7K7t61Ix~3w=5JZl#SH~9LosJ44wv+;X&KaSyoPn{A6Z zt6Iuj9Lp)=a>~LL+qj%<@ye=rQSs%2*OXAOq-9C>X-?r{&RWNDRLF^tyAI*ggdEv5 zlZqx#5ijC9`p2ML%H;7|MqxET60345C<9_ePB5l3aVR5A^Z1rA1aH$eXj^6-Bq{u0 z*Bd2wViNeH%EFXgE0KgyvTb31RF^#TahX*T`S!qxUMc6X3vgxpaZ~+K|D{?M1w5!W z$uB3jDuO*nUY&8Je>w0+{pT3x8h~QRXb2vkbMz3`;UfO1&jl~3l)$LVtZA{u^6BF; zYgNgWKC;FpOF3OK-l%KT5b?w|IAN%u9~o0kRlo_yQ=JA=1J4nO&ma3;cx70l*#tM@ zSyVD}Pz@NzL7okxyZpRZ4oaWG@_ta4)Z_dD}2W8$g#G1`a`kePU+TH!}f; zr@?_~u*walAZK1e1&8P+)U&gYO=H-|bKi0V{0Ror2qwEL+gT)hV`BU8RIL-i)N!lbUA)Hz(%&R1i;S5&I>%kd? 
z&LFGaPgwQ6oWhmDq7PbdGw8QE(RXgoy&QYozTt8)XRlk_m%_^Jmo2gK9uEF;dSHqB zV)ljX%VXCLE@el}+sTqQuB^w*ZJfF7dQsHex68MnsU zb}bihZF|xZqsCU06>VHjMZ9}p`3wH`cOnG%9r8l8xz|!GMyDx(!+WEKwlEWgS{?k5Pf1&EH3h91_){7du zcp;?_?LGScd9*ktw{=%D0FB)11nlz(!(5=?QL zdHP$5-%s%%03)0h4#v@jK-)gm2UJ~^(*B0D7b6egKZSfG;{tO#&>vpn9kg6mP~we)4LQ6Ph3|4QK4 zhEQ*o$fR5j+yQcuou2ZCz;zPr^8ynRy{Lq}3gY>!Nlz;FQ|Kn)_w0Xy*l@@yYkCW1 ziE49)<1>`Pp^^*(L%9PtBs=^}4Xluz_`Ub>`teTUpzXA>{{>t5TMYgy29+4#he|Sh z_A`9MPeM5N`qs2vScJ%c1&F~R%M(7|Ou~kHh2^%vBy8Y_Et>=Gpqmj^hSg0H4(xv? z@X4+gvK1!fd<7VIVto_NKhI$Kp^fP(Jn2_PnfCxA1FaCeMcvndrQ+FwVROW^IiB;} zf}v^Gvh8+9r0EFq@+!_)b@?P`Y=9&Dvg=l}(zq)e8Xh<2F4Dvy^c~}$`(xclxb7p7 z?w4ZSuSB|E;oQf=))P_P$#`}iocXYH=5pSJ`8zokIJE%z6fWzEmGyCDeakOJ%7&xn z{c*E(#b|zS-?@EDf%E%`cS93rg$YT-+POGzFULv**%r=>Iys27!qO#E++x4vy0CY# ze>I=V$p<5!-W1bSaXRSrU0nkz>UCk$W?oDGQ&&O%2Gw6}p!=)S?Q|0JM`h)=Gu^No zfjDKSpUB_jRx{urar})I>7)fbXASJ@8EJEhOoEK^NG6#rTyYDR z(?aHnTe-rm*KAy2N377r6}leN@QpY`)fU(xlU)b(;E6wJ{M~AoPIW^^yK>k0U_XKO ze#;Nmm|~GZ1jxAHWM_Rw5vbQayXL9MtuA`3?5;Cbi(3bB}~G}{9fP0 zgxd{0y!j{m#(rRC~vA zU--~y*yH17f{~fi52*sW_TkoIdh3G@4c(*o2eX!LBL-~J5ij&XxZJKgVO2z^hhKpJh!R{--4Nq1Sns6VfFZ&Y7?Ry$Q)K^$ z8*7pacayl)X-|^rl;laoN}ePoo;=}9j9haR`#D5FW+PKe7zC3U9(cmyOpf2Hx`;8E z`C*zl?w_y_)=4+V5!4A4rcXPi-}6{TYUqJVU~scJ;MX`z@#0PRqZhV#;Rbj#f^!Yr z41oD?(~D2G5>iyS;SYeko4t0aK^kearDu+`_~r`oia;)ewNGOIv7V@iWWf>*E~>!c z=^=Z@OC+;?hOdDr3$IZCeS6nHdIP_aMTid2;D16U`vimEU_j_e0kzmy@Ub3)It&n9 zNbna35~=#D)I+tN-te$ML6`Cc2}(hZbcrbPuo99|uu;N9_*pSE;3XW633#aiywoPl z`{C3qqSOv~rr>vN*w859tfO!!`>B8f&wxZ1lWDP3fc!=Q9KH@eAppNrFzxk|UkQ`; z{W)NhvVQ_R`SBNhP*>(9X_|Mp^g#}he){oFEV~Ksj=o+g z&}BU}z9{rLwagUpHC_n;4-v_z2_7QDV9b@ctK705c$XSdK93NUQ<0ty=lOpsVoojk zmpFByKaseX!?TvWLG6Ln9RG1Y?65gD=NQ`C2b+7ry#zMy!8RTE6|NM@mNxs=XGzM~ z-s8mgL?Sp)vw&kvpqt5@ng@Fc|1!w4mLD-sd-y8edSUe|h ziHQ}obMOa8dZD(towIDet_oW^!HCG29iO(l&<08NA638sqmriD9V_kUO8b|+kR0+v3=79@!{cIh?AW+n#i%kCng3mA@E%=}VFFm!p!l*aAa9-`;$N!y!Lypyk1ImGf&%Vq;64kuU>o0=(6B`q>Ty) 
z(_B-eDesiXkYT1m*7pHW=JgDwT(%xl#;Gs3r^qsM!m{D;yQ$`&wn}2865~XsD`-n1!wq%I7sDOpC%{e$@vSJOFTw7=rL17Qb zG#zFiI#{z`!zs6O1>3nCIQXS3R@})IcU~Xiid{=O!Z}+xOY1dn*wVps&c6|@c>}Ss zA+Bubwl`9CC~6*w@90L3+RT}oul2&R6Jd1s&8eL8Xw3c^XMZj1VIss|a4qNL≪4 zmBRi^T!>AJ-=y& z99KflMoO3fBPL-EGe~~mnD<^W3u)5%PV65cn0%=y=mjJ2)1{zZu!w{Si;$Ccjwt(n zY9%r20PI;NdzIk`P+0ecQ~TizQvPUu4gV8y#0J4n56Qb{lM_#*QxV9pCzO-HjDbz) zI(Z9EH(LTD1b63s_fU4uc?|} zQ+2VHi&JW~5KingVJM-<*4A24MrO7Bw;Jlfc<*rQlF{fNT*{Wla;UfDOw-pY=v Z42z*VD*HWs!Pm-urw2B 128 or len(topic) == 0: + logger.error("topic string length too long,need decrease %d bytes" % (128 - len(topic))) + return False + return True + + +def thing_trigger_event(topic, event_tuple, cloud_name='default'): + try: + upstream_publish_topic = get_upstream_publish_topic_by_name(cloud_name) + if not isinstance(event_tuple, tuple): + logger.error("Illegal event tuple. %s" % event_tuple) + return + event, params = event_tuple + request_id = __timestamp() + request = {'id':request_id, + 'version':"1.0", + 'params':{"value": params}, + 'method':"thing.event.%s.post" % event} + topic = topic.replace("{tsl.event.identifer}", event) + request = json.dumps(request) + wrap_payload = wrap_upstream_payload(topic, request, 0) + mqtt_transport.publish(upstream_publish_topic, json.dumps(wrap_payload)) + except Exception as e: + try: + logger.error("thing_trigger_event error. 
%s" % e) + finally: + e = None + del e + + +def thing_post_property(topic, property_data, cloud_name='default'): + try: + upstream_publish_topic = get_upstream_publish_topic_by_name(cloud_name) + request_id = __timestamp() + request = { + 'id': request_id, + 'version': '"1.0"', + 'params': property_data, + 'method': '"thing.event.property.post"'} + request = json.dumps(request) + wrap_payload = wrap_upstream_payload(topic, request, 1) + upstream_publish_topic = get_upstream_publish_topic_by_name(cloud_name) + mqtt_transport.publish(upstream_publish_topic, json.dumps(wrap_payload)) + except Exception as e: + try: + logger.error("thing_post_property error. %s" % e) + finally: + e = None + del e + + +def publish_custom_topic(topic, payload, qos=1, cloud_name='default'): + try: + if "user" not in topic: + logger.error("Illegal topic. %s" % topic) + return + if not isinstance(payload, (str, bytes)): + payload = json.dumps(payload) + wrap_payload = wrap_upstream_payload(topic, payload, qos) + upstream_publish_topic = get_upstream_publish_topic_by_name(cloud_name) + mqtt_transport.publish(upstream_publish_topic, json.dumps(wrap_payload)) + except Exception as e: + try: + logger.error("publish_custom_topic error. %s" % e) + finally: + e = None + del e + + +def thing_answer_rrpc(topic, payload, cloud_name='default'): + try: + if "rrpc" not in topic: + logger.error("Illegal topic. %s" % topic) + return + if not isinstance(payload, (str, bytes)): + payload = json.dumps(payload) + wrap_payload = wrap_upstream_payload(topic, payload, 0) + upstream_publish_topic = get_upstream_publish_topic_by_name(cloud_name) + mqtt_transport.publish(upstream_publish_topic, json.dumps(wrap_payload)) + except Exception as e: + try: + logger.error("thing_answer_rrpc error. 
%s" % e) + finally: + e = None + del e + + +def thing_answer_service(topic, payload, cloud_name='default'): + try: + if not isinstance(payload, (str, bytes)): + payload = json.dumps(payload) + wrap_payload = wrap_upstream_payload(topic, payload, 0) + upstream_publish_topic = get_upstream_publish_topic_by_name(cloud_name) + mqtt_transport.publish(upstream_publish_topic, json.dumps(wrap_payload)) + except Exception as e: + try: + logger.error("thing_answer_service error. %s" % e) + finally: + e = None + del e + + +def publish_generic_message(topic, payload, cloud_name='default'): + try: + if not isinstance(payload, (str, bytes)): + payload = json.dumps(payload) + wrap_payload = wrap_upstream_payload(topic, payload, 1) + upstream_publish_topic = get_upstream_publish_topic_by_name(cloud_name) + mqtt_transport.publish(upstream_publish_topic, json.dumps(wrap_payload)) + except Exception as e: + try: + logger.error("publish_generic_message error. %s" % e) + finally: + e = None + del e + + +class ThingShadow: + + def __init__(self, product_key, device_name): + self._ThingShadow__product_key = product_key + self._ThingShadow__device_name = device_name + self._ThingShadow__version = None + self._ThingShadow__timestamp = None + self._ThingShadow__state = None + self._ThingShadow__metadata = None + self._ThingShadow__latest_shadow_lock = threading.Lock() + self._ThingShadow__latest_received_time = None + self._ThingShadow__lastest_received_payload = None + self._ThingShadow__thing_topic_shadow_get = "/shadow/get/%s/%s" % (self._ThingShadow__product_key, self._ThingShadow__device_name) + self._ThingShadow__thing_topic_shadow_update = "/shadow/update/%s/%s" % (self._ThingShadow__product_key, self._ThingShadow__device_name) + self.all_subscribe_topics = [self._ThingShadow__thing_topic_shadow_get] + + def get_version(self): + with self._ThingShadow__latest_shadow_lock: + return self._ThingShadow__version + + def get_metadata(self): + with self._ThingShadow__latest_shadow_lock: + 
return self._ThingShadow__metadata + + def get_state(self): + with self._ThingShadow__latest_shadow_lock: + return self._ThingShadow__state + + def set_state(self, state): + with self._ThingShadow__latest_shadow_lock: + self._ThingShadow__state = state + + def set_metadata(self, metadata): + with self._ThingShadow__latest_shadow_lock: + self._ThingShadow__metadata = metadata + + def set_version(self, version): + with self._ThingShadow__latest_shadow_lock: + self._ThingShadow__version = version + + def set_timestamp(self, timestamp): + with self._ThingShadow__latest_shadow_lock: + self._ThingShadow__timestamp = timestamp + + def set_latest_recevied_time(self, timestamp): + with self._ThingShadow__latest_shadow_lock: + self._ThingShadow__latest_received_time = timestamp + + def get_latest_recevied_time(self): + with self._ThingShadow__latest_shadow_lock: + return self._ThingShadow__latest_received_time + + def set_latest_recevied_payload(self, payload): + with self._ThingShadow__latest_shadow_lock: + self._ThingShadow__latest_received_payload = payload + + def get_latest_recevied_payload(self): + with self._ThingShadow__latest_shadow_lock: + return self._ThingShadow__latest_received_payload + + def to_dict(self): + return {'state':self._ThingShadow__state, 'metadata':self._ThingShadow__metadata, 'version':self._ThingShadow__version, 'timestamp':self._ThingShadow__timestamp} + + def to_json_string(self): + return json.dumps(self.to_dict()) + + def load_json(self, payload): + return json.loads(self.to_str(payload)) + + def to_str(self, payload): + if type(payload) is bytes: + return str(payload, "utf-8") + return payload + + def try_parse_try_shadow(self, payload): + try: + self.set_latest_recevied_time(self._ThingShadow__timestamp()) + self.set_latest_recevied_payload(payload) + msg = self.load_json(payload) + if "version" in msg: + self.set_version(msg["version"]) + else: + if "payload" in msg: + if "version" in msg["payload"]: + 
self.set_version(msg["payload"]["version"]) + if "timestamp" in msg: + self.set_timestamp(msg["timestamp"]) + else: + if "payload" in msg: + if "timestamp" in msg["payload"]: + self.set_timestamp(msg["payload"]["timestamp"]) + elif "payload" in msg: + if msg["payload"]["status"] == "success": + if "state" in msg["payload"]: + self.set_state(msg["payload"]["state"]) + if "metadata" in msg["payload"]: + self.set_metadata(msg["payload"]["metadata"]) + except Exception as e: + try: + logger.error("try_parse_try_shadow error: %s", e) + finally: + e = None + del e + + def thing_update_shadow(self, reported, version, cloud_name='default'): + request = {'state':{"reported": reported}, + 'method':"update", + 'version':version} + return self._update_shadow(request, cloud_name) + + def thing_get_shadow(self, cloud_name='default'): + request = {"method": "get"} + return self._update_shadow(request, cloud_name) + + def _update_shadow(self, request, cloud_name): + if not isinstance(request, (str, bytes)): + payload = json.dumps(request) + wrap_payload = wrap_upstream_payload(self._ThingShadow__thing_topic_shadow_update, payload, 1) + upstream_publish_topic = get_upstream_publish_topic_by_name(cloud_name) + mqtt_transport.publish(upstream_publish_topic, json.dumps(wrap_payload)) + return (0, None) diff --git a/APPS_UNCOMPILED/src/quickfaas/awsiot.py b/APPS_UNCOMPILED/src/quickfaas/awsiot.py new file mode 100644 index 0000000..8769363 --- /dev/null +++ b/APPS_UNCOMPILED/src/quickfaas/awsiot.py @@ -0,0 +1,455 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/quickfaas/awsiot.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 23313 bytes +""" +Drivers +Created on 2021/1/25 +@author: Lius +""" +import json, uuid +from threading import Lock +from common.Logger import logger +from 
.transport import mqtt_transport +from .packet_handler import wrap_upstream_payload +from .service_id import get_upstream_publish_topic_by_name +MAX_PAYLOAD_LENTH = 131072 + +def awsiot_publish(topic, payload, qos=1, cloud_name='default'): + """ + custom callback to publish data to awsiot, + eg: if use awsiot shadow, after receive message, custom should pub ack shadow status + :param topic: + :param payload: + :param qos: + :return: True if success else False + """ + try: + if not isinstance(payload, (str, bytes)): + payload = json.dumps(payload) + if qos not in (0, 1): + logger.warn("Invalid custom qos: %s" % qos) + return False + if len(payload) > MAX_PAYLOAD_LENTH: + raise ValueError("The message size can not exceeded the max allowed size of 128 KB.") + logger.debug("Custom publish data, topic: %s , qos : %s, payload : %s" % (topic, qos, payload)) + upstream_publish_topic = get_upstream_publish_topic_by_name(cloud_name) + wrap_payload = wrap_upstream_payload(topic, payload, qos) + mqtt_transport.publish(upstream_publish_topic, json.dumps(wrap_payload)) + return True + except Exception as e: + try: + logger.warn("Custom publish data error %s, topic %s, payload %s" % (e, topic, payload)) + finally: + e = None + del e + + return False + + +class _shadowRequestToken: + URN_PREFIX_LENGTH = 9 + + def getNextToken(self): + return uuid.uuid4().urn[self.URN_PREFIX_LENGTH[:None]] + + +class _basicJSONParser: + + def setString(self, srcString): + self._rawString = srcString + self._dictionObject = None + + def regenerateString(self): + return json.dumps(self._dictionaryObject) + + def getAttributeValue(self, srcAttributeKey): + return self._dictionaryObject.get(srcAttributeKey) + + def setAttributeValue(self, srcAttributeKey, srcAttributeValue): + self._dictionaryObject[srcAttributeKey] = srcAttributeValue + + def validateJSON(self): + try: + self._dictionaryObject = json.loads(self._rawString) + except ValueError: + return False + else: + return True + + +class 
_shadowAction: + _actionType = [ + "get", "update", "delete", "delta"] + + def __init__(self, srcShadowName, srcActionName): + if srcActionName is None or srcActionName not in self._actionType: + raise TypeError("Unsupported shadow action.") + self._shadowName = srcShadowName + self._actionName = srcActionName + self.isDelta = srcActionName == "delta" + if self.isDelta: + self._topicDelta = "$aws/things/" + str(self._shadowName) + "/shadow/update/delta" + else: + self._topicGeneral = "$aws/things/" + str(self._shadowName) + "/shadow/" + str(self._actionName) + self._topicAccept = "$aws/things/" + str(self._shadowName) + "/shadow/" + str(self._actionName) + "/accepted" + self._topicReject = "$aws/things/" + str(self._shadowName) + "/shadow/" + str(self._actionName) + "/rejected" + + def getTopicGeneral(self): + return self._topicGeneral + + def getTopicAccept(self): + return self._topicAccept + + def getTopicReject(self): + return self._topicReject + + def getTopicDelta(self): + return self._topicDelta + + +class DeviceShadow: + + def __init__(self, srcShadowName): + """ + + The class that denotes a local/client-side device shadow instance. + + Users can perform shadow operations on this instance to retrieve and modify the + corresponding shadow JSON document in AWS IoT Cloud. 
The following shadow operations + are available: + - Get + - Update + - Delete + - Listen on delta + + """ + self._shadowName = srcShadowName + self._basicJSONParserHandler = _basicJSONParser() + self._tokenHandler = _shadowRequestToken() + self._lastVersionInSync = -1 + self._isGetSubscribed = False + self._isUpdateSubscribed = False + self._isDeleteSubscribed = False + self._shadowSubscribeCallbackTable = dict() + self._shadowSubscribeCallbackTable["get"] = None + self._shadowSubscribeCallbackTable["delete"] = None + self._shadowSubscribeCallbackTable["update"] = None + self._shadowSubscribeCallbackTable["delta"] = None + self._shadowSubscribeStatusTable = dict() + self._shadowSubscribeStatusTable["get"] = 0 + self._shadowSubscribeStatusTable["delete"] = 0 + self._shadowSubscribeStatusTable["update"] = 0 + self._tokenPool = dict() + self._dataStructureLock = Lock() + self.upstream_publish_topic = "" + self.all_subscribe_topics = list() + self._get_all_subscribe_topics() + + def _get_all_subscribe_topics(self): + currentShadowAction = _shadowAction(self._shadowName, "get") + self.all_subscribe_topics.append(currentShadowAction.getTopicAccept()) + self.all_subscribe_topics.append(currentShadowAction.getTopicReject()) + self.all_subscribe_topics.append(currentShadowAction.getTopicDelta()) + currentShadowAction = _shadowAction(self._shadowName, "update") + self.all_subscribe_topics.append(currentShadowAction.getTopicAccept()) + self.all_subscribe_topics.append(currentShadowAction.getTopicReject()) + currentShadowAction = _shadowAction(self._shadowName, "delete") + self.all_subscribe_topics.append(currentShadowAction.getTopicAccept()) + self.all_subscribe_topics.append(currentShadowAction.getTopicReject()) + + def _doNonPersistentUnsubscribe(self, currentAction): + logger.debug("Unsubscribed to " + currentAction + " accepted/rejected topics for deviceShadow: " + self._shadowName) + + def is_subscribe_topic(self, topic): + return topic in self.all_subscribe_topics + + 
def generalCallback(self, topic, payload): + if not self.is_subscribe_topic(topic): + return + with self._dataStructureLock: + currentTopic = topic + currentAction = self._parseTopicAction(currentTopic) + currentType = self._parseTopicType(currentTopic) + payloadUTF8String = payload.decode("utf-8") + if currentAction in ('get', 'delete', 'update'): + self._basicJSONParserHandler.setString(payloadUTF8String) + if self._basicJSONParserHandler.validateJSON(): + currentToken = self._basicJSONParserHandler.getAttributeValue("clientToken") + if currentToken is not None: + logger.debug("shadow message clientToken: " + currentToken) + if currentToken is not None and currentToken in self._tokenPool.keys(): + logger.debug("Token is in the pool. Type: " + currentType) + if currentType == "accepted": + incomingVersion = self._basicJSONParserHandler.getAttributeValue("version") + if incomingVersion is not None and incomingVersion > self._lastVersionInSync and currentAction != "delete": + self._lastVersionInSync = incomingVersion + else: + self._lastVersionInSync = -1 + del self._tokenPool[currentToken] + self._shadowSubscribeStatusTable[currentAction] -= 1 + if self._shadowSubscribeStatusTable.get(currentAction) <= 0: + self._shadowSubscribeStatusTable[currentAction] = 0 + self._doNonPersistentUnsubscribe(currentAction) + if self._shadowSubscribeCallbackTable.get(currentAction) is not None: + self._shadowSubscribeCallbackTable[currentAction](payloadUTF8String, currentType, None) + else: + currentType += "/" + self._parseTopicShadowName(currentTopic) + self._basicJSONParserHandler.setString(payloadUTF8String) + if self._basicJSONParserHandler.validateJSON(): + incomingVersion = self._basicJSONParserHandler.getAttributeValue("version") + if incomingVersion is not None and incomingVersion > self._lastVersionInSync: + self._lastVersionInSync = incomingVersion + if self._shadowSubscribeCallbackTable.get(currentAction) is not None: + 
self._shadowSubscribeCallbackTable[currentAction](payloadUTF8String, currentType, None) + + def _parseTopicAction(self, srcTopic): + ret = None + fragments = srcTopic.split("/") + if fragments[5] == "delta": + ret = "delta" + else: + ret = fragments[4] + return ret + + def _parseTopicType(self, srcTopic): + fragments = srcTopic.split("/") + return fragments[5] + + def _parseTopicShadowName(self, srcTopic): + fragments = srcTopic.split("/") + return fragments[2] + + def shadowGet(self, srcCallback): + """ + **Description** + + Retrieve the device shadow JSON document from AWS IoT by publishing an empty JSON document to the + corresponding shadow topics. Shadow response topics will be subscribed to receive responses from + AWS IoT regarding the result of the get operation. Retrieved shadow JSON document will be available + in the registered callback. + + **Parameters** + + *srcCallback* - Function to be called when the response for this shadow request comes back. Should + be in form :code:`customCallback(payload, responseStatus, token)`, where :code:`payload` is the + JSON document returned, :code:`responseStatus` indicates whether the request has been accepted, + rejected or is a delta message, :code:`token` is the token used for tracing in this request. + + **Returns** + + The token used for tracing in this shadow request. 
+ + """ + with self._dataStructureLock: + self._shadowSubscribeCallbackTable["get"] = srcCallback + self._shadowSubscribeStatusTable["get"] += 1 + currentToken = self._tokenHandler.getNextToken() + self._tokenPool[currentToken] = ["get", currentToken] + self._basicJSONParserHandler.setString("{}") + self._basicJSONParserHandler.validateJSON() + self._basicJSONParserHandler.setAttributeValue("clientToken", currentToken) + currentPayload = self._basicJSONParserHandler.regenerateString() + currentShadowAction = _shadowAction(self._shadowName, "get") + topic = currentShadowAction.getTopicGeneral() + wrap_payload = wrap_upstream_payload(topic, currentPayload, 0) + mqtt_transport.publish(self.upstream_publish_topic, json.dumps(wrap_payload)) + return currentToken + + def shadowDelete(self, srcCallback): + """ + **Description** + + Delete the device shadow from AWS IoT by publishing an empty JSON document to the corresponding + shadow topics. Shadow response topics will be subscribed to receive responses from AWS IoT + regarding the result of the get operation. Responses will be available in the registered callback. + **Parameters** + + *srcCallback* - Function to be called when the response for this shadow request comes back. Should + be in form :code:`customCallback(payload, responseStatus, token)`, where :code:`payload` is the + JSON document returned, :code:`responseStatus` indicates whether the request has been accepted, + rejected or is a delta message, :code:`token` is the token used for tracing in this request. + + **Returns** + + The token used for tracing in this shadow request. 
def shadowDelete(self, srcCallback):
    """
    **Description**

    Delete the device shadow from AWS IoT by publishing an empty JSON document
    (tagged with a fresh clientToken) to the shadow delete topic.  The
    accepted/rejected response arrives through the registered callback.

    **Parameters**

    *srcCallback* - called as :code:`customCallback(payload, responseStatus, token)`.

    **Returns**

    The token used for tracing in this shadow request.
    """
    with self._dataStructureLock:
        self._shadowSubscribeCallbackTable["delete"] = srcCallback
        self._shadowSubscribeStatusTable["delete"] += 1
        currentToken = self._tokenHandler.getNextToken()
        self._tokenPool[currentToken] = ["delete", currentToken]
        self._basicJSONParserHandler.setString("{}")
        self._basicJSONParserHandler.validateJSON()
        self._basicJSONParserHandler.setAttributeValue("clientToken", currentToken)
        currentPayload = self._basicJSONParserHandler.regenerateString()
        currentShadowAction = _shadowAction(self._shadowName, "delete")
        topic = currentShadowAction.getTopicGeneral()
        wrap_payload = wrap_upstream_payload(topic, currentPayload, 0)
        mqtt_transport.publish(self.upstream_publish_topic, json.dumps(wrap_payload))
    return currentToken


def shadowUpdate(self, srcJSONPayload, srcCallback):
    """
    **Description**

    Update the device shadow by publishing *srcJSONPayload* (tagged with a
    fresh clientToken) to the shadow update topic.  The accepted/rejected
    response arrives through the registered callback.

    **Parameters**

    *srcJSONPayload* - JSON document string used to update the shadow.

    *srcCallback* - called as :code:`customCallback(payload, responseStatus, token)`.

    **Returns**

    The token used for tracing in this shadow request, or None when
    *srcJSONPayload* is not valid JSON.
    """
    self._basicJSONParserHandler.setString(srcJSONPayload)
    if not self._basicJSONParserHandler.validateJSON():
        logger.warn("Invalid JSON file.")
        return
    with self._dataStructureLock:
        currentToken = self._tokenHandler.getNextToken()
        self._tokenPool[currentToken] = ["update", currentToken]
        self._basicJSONParserHandler.setAttributeValue("clientToken", currentToken)
        JSONPayloadWithToken = self._basicJSONParserHandler.regenerateString()
        self._shadowSubscribeCallbackTable["update"] = srcCallback
        self._shadowSubscribeStatusTable["update"] += 1
        # Fix: was _shadowAction(self._shadowName, "get"), which published the
        # update request to the shadow *get* topic (compare shadowGet/shadowDelete).
        currentShadowAction = _shadowAction(self._shadowName, "update")
        topic = currentShadowAction.getTopicGeneral()
        wrap_payload = wrap_upstream_payload(topic, JSONPayloadWithToken, 0)
        mqtt_transport.publish(self.upstream_publish_topic, json.dumps(wrap_payload))
    return currentToken


def shadowRegisterDeltaCallback(self, srcCallback):
    """
    **Description**

    Register *srcCallback* to be invoked for every delta message of this
    device shadow (difference between desired and reported state).

    **Returns**

    None
    """
    with self._dataStructureLock:
        self._shadowSubscribeCallbackTable["delta"] = srcCallback
        logger.info("Subscribed to delta topic for deviceShadow: " + self._shadowName)


def shadowUnregisterDeltaCallback(self):
    """
    **Description**

    Stop listening for delta messages of this device shadow; no delta messages
    will be delivered after this call.

    **Returns**

    None
    """
    with self._dataStructureLock:
        # Fix: was `del ...["delta"]`, which raised KeyError on a second
        # unregister; resetting to None matches the table's initial state and
        # keeps the .get()-based dispatch behaviour identical.
        self._shadowSubscribeCallbackTable["delta"] = None
        logger.info("Unsubscribed to delta topics for deviceShadow: " + self._shadowName)
def shadowGet(shadowName, cloud_name='default'):
    """Fire-and-forget shadow GET request for *shadowName* via the named cloud.

    Publishes an empty, token-tagged JSON document to the shadow get topic.
    Returns True when the request was published, False on any error.
    """
    try:
        upstream_publish_topic = get_upstream_publish_topic_by_name(cloud_name)
        topic = _shadowAction(shadowName, "get").getTopicGeneral()
        currentToken = _shadowRequestToken().getNextToken()
        parser = _basicJSONParser()
        parser.setString("{}")
        parser.validateJSON()
        parser.setAttributeValue("clientToken", currentToken)
        payload = parser.regenerateString()
        wrap_payload = wrap_upstream_payload(topic, payload, 0)
        mqtt_transport.publish(upstream_publish_topic, json.dumps(wrap_payload))
        return True
    except Exception as e:
        logger.error("API error. %s" % e)
        return False


def shadowUpdate(shadowName, json_payload, cloud_name='default'):
    """Fire-and-forget shadow UPDATE request publishing *json_payload*.

    Returns True when the request was published, False on invalid JSON or error.
    """
    try:
        upstream_publish_topic = get_upstream_publish_topic_by_name(cloud_name)
        topic = _shadowAction(shadowName, "update").getTopicGeneral()
        parser = _basicJSONParser()
        parser.setString(json_payload)
        if not parser.validateJSON():
            # Fix: an invalid payload used to fall through and return None
            # (neither True nor False) without any diagnostic.
            logger.warn("Invalid JSON file.")
            return False
        currentToken = _shadowRequestToken().getNextToken()
        parser.setAttributeValue("clientToken", currentToken)
        JSONPayloadWithToken = parser.regenerateString()
        wrap_payload = wrap_upstream_payload(topic, JSONPayloadWithToken, 0)
        mqtt_transport.publish(upstream_publish_topic, json.dumps(wrap_payload))
        return True
    except Exception as e:
        logger.error("API error. %s" % e)
        return False


def shadowDelete(shadowName, cloud_name='default'):
    """Fire-and-forget shadow DELETE request for *shadowName*.

    Returns True when the request was published, False on any error.
    """
    try:
        upstream_publish_topic = get_upstream_publish_topic_by_name(cloud_name)
        topic = _shadowAction(shadowName, "delete").getTopicGeneral()
        currentToken = _shadowRequestToken().getNextToken()
        parser = _basicJSONParser()
        parser.setString("{}")
        parser.validateJSON()
        parser.setAttributeValue("clientToken", currentToken)
        currentPayload = parser.regenerateString()
        wrap_payload = wrap_upstream_payload(topic, currentPayload, 0)
        mqtt_transport.publish(upstream_publish_topic, json.dumps(wrap_payload))
        return True
    except Exception as e:
        logger.error("API error. %s" % e)
        return False
def get_size(self):
    """Approximate the wire size of this message in bytes.

    Sums sys.getsizeof over the non-None instance attributes (excluding the
    custom_properties dict object itself) plus the non-None custom property
    values.  NOTE(review): appears to be patched onto azure Message as a
    method - confirm at the assignment site.
    """
    total = sum(sys.getsizeof(v) for v in self.__dict__.values()
                if v is not None and v is not self.custom_properties)
    if self.custom_properties:
        total += sum(sys.getsizeof(v) for v in self.custom_properties.values() if v is not None)
    return total


def send_message(topic, data, custom_properties=None, message_id=None, expiry_time_utc=None, correlation_id=None, user_id=None, content_encoding=None, content_type=None, output_name=None, cloud_name="default"):
    """Publish a device-to-cloud telemetry message to Azure IoT Hub.

    System and custom properties are URL-encoded into the MQTT topic as a
    property bag.  Returns True on success, False on any failure.
    """
    try:
        if custom_properties is None:
            # Fix: was a mutable default argument ({}).
            custom_properties = {}
        msg = Message(data)
        msg.message_id = message_id
        msg.expiry_time_utc = expiry_time_utc
        msg.correlation_id = correlation_id
        msg.user_id = user_id
        msg.content_encoding = content_encoding
        msg.content_type = content_type
        msg.output_name = output_name
        if isinstance(custom_properties, dict):
            try:
                for val in custom_properties.values():
                    if isinstance(val, (dict, tuple, list, set)):
                        raise ValueError("Custom properties dictionary does not support tuple/dict/list/set data type values.")
                if len(str(custom_properties)) <= 81920:
                    msg.custom_properties = custom_properties
                else:
                    logger.warn("The custom properties exceeds the maximum byte limit > 81920.")
            except Exception as e:
                logger.error(e)
        else:
            # Fix: original message text lost the expected type name.
            logger.warn("Expect a dict, got %s." % type(custom_properties))
        if msg.get_size() > device_constant.TELEMETRY_MESSAGE_SIZE_LIMIT:
            raise ValueError("Size of telemetry message can not exceed 256 KB.")
        upstream_publish_topic = get_upstream_publish_topic_by_name(cloud_name)
        system_properties = []
        if msg.output_name:
            system_properties.append(("$.on", str(msg.output_name)))
        if msg.message_id:
            system_properties.append(("$.mid", str(msg.message_id)))
        if msg.correlation_id:
            system_properties.append(("$.cid", str(msg.correlation_id)))
        if msg.user_id:
            system_properties.append(("$.uid", str(msg.user_id)))
        if msg.content_type:
            system_properties.append(("$.ct", str(msg.content_type)))
        if msg.content_encoding:
            system_properties.append(("$.ce", str(msg.content_encoding)))
        if msg.iothub_interface_id:
            system_properties.append(("$.ifid", str(msg.iothub_interface_id)))
        if msg.expiry_time_utc:
            exp = msg.expiry_time_utc
            system_properties.append(("$.exp", exp.isoformat() if isinstance(exp, date) else exp))
        topic += version_compat.urlencode(system_properties, quote_via=urllib.parse.quote)
        if msg.custom_properties:
            if system_properties:
                topic += "&"
            custom_prop_seq = sorted((str(k), str(v)) for k, v in msg.custom_properties.items())
            keys = [k for k, _ in custom_prop_seq]
            if len(keys) != len(set(keys)):
                raise ValueError("Duplicate keys in custom properties!")
            topic += version_compat.urlencode(custom_prop_seq, quote_via=urllib.parse.quote)
        wrap_payload = wrap_upstream_payload(topic, msg.data, 1)
        mqtt_transport.publish(upstream_publish_topic, json.dumps(wrap_payload))
        # Fix: the success path previously fell through to `return False`.
        return True
    except Exception as e:
        logger.error("[AzureIoT]: Sending message to cloud failed. %s" % e)
        return False


def parse_c2d_message(topic, payload):
    """Rebuild an azure Message from a cloud-to-device MQTT *topic*/*payload*.

    The property bag (segment 4 of the topic) is decoded into the message's
    system and custom properties.  Raises ValueError for malformed topics.
    """
    message = Message(payload)
    parts = topic.split("/")
    # Fix: a topic with <= 3 segments used to leave `properties` unbound and
    # crash with NameError; both malformed shapes now raise ValueError.
    if len(parts) <= 3 or parts[3] != "devicebound":
        raise ValueError("topic has incorrect format")
    properties = parts[4] if len(parts) > 4 else None
    ignored_extraction_values = ["iothub-ack", "$.to"]
    if properties:
        for entry in properties.split("&"):
            pair = entry.split("=")
            key = urllib.parse.unquote(pair[0])
            value = urllib.parse.unquote(pair[1]) if len(pair) > 1 else None
            if key in ignored_extraction_values:
                continue
            if key == "$.mid":
                message.message_id = value
            elif key == "$.cid":
                message.correlation_id = value
            elif key == "$.uid":
                message.user_id = value
            elif key == "$.ct":
                message.content_type = value
            elif key == "$.ce":
                message.content_encoding = value
            elif key == "$.exp":
                message.expiry_time_utc = value
            else:
                message.custom_properties[key] = value
    return message
+Drivers +Created on 2021/1/7 +@author: Lius +""" +import json +from common.Logger import logger +from common.InternalPath import MQTT_AGENT_STATUS_FILE + +def get_status(cloud_name='default'): + try: + cloud_status_file = MQTT_AGENT_STATUS_FILE.format(cloud_name=cloud_name) + with open(cloud_status_file, "r", encoding="utf-8-sig") as load_fd: + load_status = json.load(load_fd) + if load_status["status"] == 2: + return True + return False + except Exception as error: + try: + logger.error("Get cloud connection status failed(%s)" % error) + finally: + error = None + del error + + return False diff --git a/APPS_UNCOMPILED/src/quickfaas/config.py b/APPS_UNCOMPILED/src/quickfaas/config.py new file mode 100644 index 0000000..488797a --- /dev/null +++ b/APPS_UNCOMPILED/src/quickfaas/config.py @@ -0,0 +1,34 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/quickfaas/config.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 935 bytes +""" +Drivers +Created on 2021/1/12 +@author: Lius +""" +import json, uuid +from .transport import mqtt_transport +import common.InternalTopic as InternalTopic + +def set(config, timeout=10): + task_id = str(uuid.uuid1()) + wrap_payload = {'task_id':task_id, 'config':config} + result = mqtt_transport.sync_publish_message((InternalTopic.EVENT_BUS_SYSTEM_UPDATE_CONFIG), + (json.dumps(wrap_payload)), event_id=task_id, timeout=timeout) + if result: + return result["result"] + return "failed" + + +def get(runningConfig=True, timeout=10): + task_id = str(uuid.uuid1()) + wrap_payload = {'task_id':task_id, 'runningConfig':runningConfig} + result = mqtt_transport.sync_publish_message((InternalTopic.EVENT_BUS_SYSTEM_GET_CONFIG), + (json.dumps(wrap_payload)), event_id=task_id, timeout=timeout) + if result: + return result["config"] + return result 
diff --git a/APPS_UNCOMPILED/src/quickfaas/controller.py b/APPS_UNCOMPILED/src/quickfaas/controller.py new file mode 100644 index 0000000..3d1c195 --- /dev/null +++ b/APPS_UNCOMPILED/src/quickfaas/controller.py @@ -0,0 +1,86 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/quickfaas/controller.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 2267 bytes +""" +Drivers +Created on 2021/1/11 +@author: Lius +""" +import copy +from common.Logger import logger +from .config import get +CONTROLLERS_STATUS = dict() + +def get_status(controller=None): + global CONTROLLERS_STATUS + try: + if controller: + con_status = list() + con_status.append(CONTROLLERS_STATUS[controller]) + return con_status + return list(CONTROLLERS_STATUS.values()) + except Exception as e: + try: + logger.error("Get controller status failed(%s)" % e) + finally: + e = None + del e + + return list() + + +def get_controller_status(controller=None): + try: + controller_status = copy.deepcopy(CONTROLLERS_STATUS) + for _, controller_item in controller_status.items(): + if "name" in controller_item: + controller_item.pop("name") + + if controller: + return {controller: (controller_status[controller])} + return controller_status + except Exception as e: + try: + logger.error("Get controller status failed(%s)" % e) + finally: + e = None + del e + + return dict() + + +def _find_controller_by_name(controllers, controller_name): + for controller in controllers: + if controller["name"] == controller_name: + return controller + + +def get_inspur_controller_status(controller=None): + try: + global_controllers_config = get()["controllers"] + controller_status = copy.deepcopy(CONTROLLERS_STATUS) + for controller_name, controller_item in controller_status.items(): + con = 
_find_controller_by_name(global_controllers_config, controller_name) + if con: + if "ProductKey" in con["args"]: + controller_item["ProductKey"] = con["args"]["ProductKey"] + if "DeviceName" in con["args"]: + controller_item["DeviceName"] = con["args"]["DeviceName"] + if "name" in controller_item: + controller_item.pop("name") + + if controller: + return {controller: (controller_status[controller])} + return controller_status + except Exception as e: + try: + logger.error("Get controller status failed(%s)" % e) + finally: + e = None + del e + + return dict() diff --git a/APPS_UNCOMPILED/src/quickfaas/ds1_wizard.py b/APPS_UNCOMPILED/src/quickfaas/ds1_wizard.py new file mode 100644 index 0000000..5c10371 --- /dev/null +++ b/APPS_UNCOMPILED/src/quickfaas/ds1_wizard.py @@ -0,0 +1,172 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/quickfaas/ds1_wizard.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 6741 bytes +""" +Drivers +Created on 2021/1/12 +@author: Lius +""" +import json, functools +from inspect import isfunction +from common.Logger import logger +from .measure import recall2, write_plc_values +from .config import get as get_config +from .remotebus import publish as publish_to_cloud +from .global_dict import get_global_parameter +from .clouds import get_status as get_cloud_status +from .controller import get_status as get_ctrls_status +from .azureiot import send_message as azure_send_message +from .aliyuniot import thing_answer_service as aliyun_thing_answer_service +from .aliyuniot import thing_trigger_event as aliyun_thing_trigger_event +from .aliyuniot import thing_post_property as aliyun_thing_post_property +from .awsiot import awsiot_publish as aws_awsiot_publish +from .transport import mqtt_transport +import common.InternalTopic as InternalTopic + 
def required_first_args(fun):
    """Decorator: run *fun* only when its first real argument (the message,
    positional args[1] after self, or the ``message`` keyword) is truthy;
    otherwise log a warning and return None.
    """

    @functools.wraps(fun)
    def wrapper(*args, **kwargs):
        # Fix: the original indexed kwargs["message"] even when the message was
        # passed positionally, raising KeyError instead of calling through.
        message = args[1] if len(args) > 1 else kwargs.get("message")
        if message:
            return fun(*args, **kwargs)
        logger.warn("The first parameter cannot is empty. method is invalid.")

    return wrapper


def required_callback(idx):
    """Decorator factory: run the wrapped method only when the 1-based
    positional slot *idx* (or the ``callback`` keyword) holds a plain function;
    otherwise log a warning and return None.
    """

    def wrapper(fun):

        @functools.wraps(fun)
        def _wrapper(*args, **kwargs):
            # Fix: the original indexed kwargs["callback"] even when the
            # callback was passed positionally, raising KeyError.
            callback = args[idx - 1] if len(args) >= idx else kwargs.get("callback")
            if isfunction(callback):
                return fun(*args, **kwargs)
            logger.warn("The callback parameter cannot is empty or non-method. method is invalid.")

        return _wrapper

    return wrapper


class Ds1Wizard(object):
    """Backward-compatibility facade for the deprecated DS1 scripting API.

    Every method forwards to the corresponding new quickfaas module and (where
    applicable) logs a deprecation warning.
    """

    def __init__(self):
        self.faas_upload_event = dict()
        self.aliyuniot_post_property_topic = None
        self.aliyuniot_trigger_event_topic = None
        self.aliyuniot_service_pattern_topic = None
        self.azureiot_send_message_topic = ""

    def mqtt_publish(self, topic, payload, qos=0, userdata=None):
        logger.warn("This API has been deprecated, please use the new API: 'from quickfaas.remotebus import publish'")
        return publish_to_cloud(topic, payload, qos)

    def save_data(self, data, group=None, userdata=None):
        """Save data to database (no-op placeholder)."""
        pass

    def get_global_parameter(self):
        # Resolves to the module-level import, not this method (class scope is
        # not part of the lookup chain inside methods).
        return get_global_parameter()

    @required_callback(2)
    def recall_data(self, callback=None, tail=None, timeout=60):
        try:
            logger.warn("This API has been deprecated, please use the new API: 'from quickfaas.measure import recall2'")
            recall2(callback=callback, userdata=tail, timeout=timeout)
        except Exception as error:
            logger.error("recall measure data failed(%s)" % error)

    @required_callback(2)
    def get_tag_config(self, callback=None, tail=None, timeout=60):
        try:
            logger.warn("This API has been deprecated, please use the new API: 'from quickfaas.config import get'")
            config = get_config()
            if callback:
                callback(config, tail, self)
        except Exception as e:
            logger.error("get tag config method execution error.(subscribe) %s" % e)

    @required_first_args
    def write_plc_values(self, message, callback=None, tail=None, timeout=60):
        try:
            logger.warn("This API has been deprecated, please use the new API: 'from quickfaas.measure import write_plc_values'")
            write_plc_values(message=message, callback=callback, userdata=tail, timeout=timeout)
        except Exception as e:
            logger.error("write plc values failed(%s)" % e)

    def get_connected_devices_status(self):
        logger.warn("This API has been deprecated, please use the new API: 'from quickfaas.controller import get_controller_status'")
        controllers_status = get_ctrls_status()
        return {controller["name"]: controller["health"] for controller in controllers_status}

    def get_connected_cloud_status(self):
        logger.warn("This API has been deprecated, please use the new API: 'from quickfaas.clouds import get_status'")
        return 2 if get_cloud_status() else 1

    def get_group(self, update_dict=None):
        return get_config()["groups"]

    def update_group(self, group_data):
        mqtt_transport.publish(InternalTopic.EVENT_BUS_SYSTEM_GROUP_UPDATE_CONFIG, json.dumps(group_data))
        return {"results": group_data}

    def add_group(self, group_data):
        pass

    def delete_group(self, data):
        pass

    def thing_post_property(self, property_data):
        return aliyun_thing_post_property(self.aliyuniot_post_property_topic, property_data)

    def thing_trigger_event(self, event_tuple):
        return aliyun_thing_trigger_event(self.aliyuniot_trigger_event_topic, event_tuple)

    def thing_answer_service(self, identifier, request_id, code, data):
        if data is None:
            data = {}
        response = {"id": request_id, "code": code, "data": data}
        topic = self.aliyuniot_service_pattern_topic.replace("+", identifier) + "_reply"
        aliyun_thing_answer_service(topic, response)

    def thing_answer_rrpc(self, rrpc_id, payload):
        pass

    def on_topic_answer_rrpc(self, rrpc_id, topic, payload, qos=1):
        if "request" in topic:
            topic = topic.replace("request", "response")
        publish_to_cloud(topic, payload, qos)

    def awsiot_publish(self, topic, payload, qos=1):
        return aws_awsiot_publish(topic, payload, qos)

    def send_message_to_cloud(self, data, custom_properties={}, message_id=None, expiry_time_utc=None, correlation_id=None, user_id=None, content_encoding=None, content_type=None, output_name=None):
        logger.warn("This API has been deprecated, please use the new API: 'from quickfaas.azureiot import send_message'")
        return azure_send_message(self.azureiot_send_message_topic, data, custom_properties, message_id, expiry_time_utc, correlation_id, user_id, content_encoding, content_type, output_name)
class FaaSHandler:
    """Loads user FaaS scripts, wires them to their triggers and runs them on a
    shared thread pool."""

    def __init__(self, scheduler, transport, thread_pool, max_concurrent=3):
        self.scheduler = scheduler
        self.transport = transport
        self.generic_event = dict()              # name -> generic (non-cloud) handler
        self.upload_event = dict()               # name -> cloud upload handler
        self.subcribe_event = dict()             # name -> cloud download/command handler
        self.subcribe_downstream_topics = list()
        self.ds1 = Ds1Wizard()
        self.ds1.faas_upload_event = self.upload_event
        self.transport.ds1 = self.ds1
        self.max_concurrent = max_concurrent
        self.thread_pool = thread_pool
        self.transport.thread_pool = thread_pool
        self.thread_exec_result = dict()
        self.thread_exec_result_lock = threading.Lock()
        self.mqtt_agent_subscribe_topics = dict()
        self.full_thread_pool()
        self.future_job_maps = dict()            # future_id -> (start_time, ...)
        self.future_job_maps_lock = threading.Lock()
        # Periodically report how long in-flight tasks have been running.
        self.scheduler.add_job(self.future_monitor, "interval", seconds=5,
                               id="future_monitor", replace_existing=True)

    def shutdown(self, wait=True):
        """Stop the worker pool (optionally waiting for in-flight tasks)."""
        self.thread_pool.shutdown(wait=wait)

    def full_thread_pool(self):
        """Warm the pool by submitting max_concurrent short-lived tasks."""
        for _ in range(self.max_concurrent):
            self.thread_pool.submit(self.full_thread)

    def full_thread(self):
        time.sleep(0.5)

    def future_monitor(self):
        """Periodic job: log the running time of every tracked task."""
        current_time = time.time()
        with self.future_job_maps_lock:
            for future_id, future in self.future_job_maps.items():
                started = future[0]
                if future_id in self.thread_exec_result:
                    task_name = self.thread_exec_result[future_id].name
                else:
                    task_name = future_id
                logger.debug("Task <%s> running time: %s seconds" % (task_name, current_time - started))

    def update_clouds_info(self, clouds):
        """Refresh per-cloud service info; for connected SparkPlugB clouds,
        replay the node birth, or request a rebirth when the persisted bdseq
        file is missing or incomplete."""
        for cloud in clouds:
            AENTE_SERVICE_INFO_MAPS[cloud["name"]] = cloud
            self.mqtt_agent_subscribe_topics[cloud["name"]] = {"sub_passage": "", "sub_topics": list()}
            if cloud["type"] == "SparkPlugB MQTT" and cloud["enable"] == 1 and get_status(cloud["name"]):
                file_path = "/tmp/sparkplugb%s.txt" % cloud["name"]
                try:
                    with open(file_path, "r", encoding="utf-8") as fd:
                        sparkplugb_info = json.load(fd)
                    if "bdseq" in sparkplugb_info:
                        publish_nodeAndController_message(int(sparkplugb_info["bdseq"]), cloud["name"])
                    else:
                        # NOTE(review): returning here aborts processing of the
                        # remaining clouds - behaviour preserved from the original.
                        mqtt_transport.publish("ds2/eventbus/system/sparkplugb/rebirth", cloud["name"])
                        logger.error("cloud name %s will Reboot" % cloud["name"])
                        return
                except IOError:
                    mqtt_transport.publish("ds2/eventbus/system/sparkplugb/rebirth", cloud["name"])
                    logger.error("cloud name %s will Reboot" % cloud["name"])
                    return

    def verify_and_build_script(self, name, funcname, script):
        """Compile *script* into a throwaway module and return
        (module, entry_function); (None, None) when the build fails.
        """
        try:
            logger.info("Build module: %s, script: %s" % (name, script))
            mymodule = types.ModuleType("mymodule")
            mymodule.__file__ = name
            # NOTE(review): executes user-supplied source verbatim - this is
            # only safe for trusted, operator-provided scripts.
            exec(script, mymodule.__dict__)
            # Fix: was eval("mymodule.%s" % funcname); getattr resolves the
            # entry point without evaluating funcname as expression text.
            enter_func = getattr(mymodule, funcname)
            logger.info("Build OK.")
            return (mymodule, enter_func)
        except Exception as e:
            logger.error("Build module error. %s" % e)
            return (None, None)
%s" % e) + finally: + e = None + del e + + return (None, None) + + def new_qfaas_instance(self, faas): + qfaas_ins = None + trigger_type = faas["trigger"] + if trigger_type == MESSAGE_EVENT_MESSAGE: + qfaas_ins = GeneralEvent(faas["name"]) + qfaas_ins.type = trigger_type + qfaas_ins.topic = self.format_wildcard_topic(faas["topic"]) + qfaas_ins.add_subscribe_event(self.transport) + else: + if trigger_type == TIMER_EVENT_MESSAGE: + qfaas_ins = TimerEvent(faas["name"]) + qfaas_ins.type = trigger_type + if faas["timeUnit"] == 2: + qfaas_ins.period = int(faas["period"]) * 60 * 60 + else: + if faas["timeUnit"] == 1: + qfaas_ins.period = int(faas["period"]) * 60 + else: + qfaas_ins.period = int(faas["period"]) + qfaas_ins.add_timer(self.scheduler) + else: + if trigger_type == DSA_START_EVENT_MESSAGE: + qfaas_ins = TimerEvent(faas["name"]) + qfaas_ins.type = trigger_type + qfaas_ins.add_timer(self.scheduler) + else: + if trigger_type == MEASURE_EVENT_MESSAGE: + qfaas_ins = MeasureEvent(faas["name"]) + qfaas_ins.hide_offline_data = faas["hideOfflineData"] + qfaas_ins.type = trigger_type + qfaas_ins.msg_type = faas["msgType"] + qfaas_ins.groups = copy.deepcopy(faas["groups"]) + else: + if trigger_type == WARNING_EVENT_MESSAGE: + qfaas_ins = AlarmEvent(faas["name"]) + qfaas_ins.type = trigger_type + qfaas_ins.msg_type = faas["msgType"] + qfaas_ins.alarms = copy.deepcopy(faas["alarms"]) + else: + if trigger_type == COMMAND_EVENT_MESSAGE: + qfaas_ins = SubcribeEvent(faas["name"]) + qfaas_ins.type = trigger_type + qfaas_ins.msg_type = faas["msgType"] + else: + if qfaas_ins: + qfaas_ins.func_name = faas["funcName"] + return qfaas_ins + + def add_generic_faas_instance(self, faas): + script, main = self.verify_and_build_script(faas["name"], faas["funcName"], faas["script"]) + if not script: + return + else: + faas_ins = self.new_qfaas_instance(faas) + return faas_ins or None + faas_ins.script = script + faas_ins.main = main + self.generic_event[faas["name"]] = faas_ins + + def 
add_upload_faas_instance(self, faas, cloud): + script, main = self.verify_and_build_script(faas["name"], faas["funcName"], faas["script"]) + if not script: + return + faas_ins = self.new_qfaas_instance(faas) + if not faas_ins: + return + faas_ins.cloud = cloud + faas_ins.cloud_name = faas["cloudName"] + faas_ins.topic = self.format_wildcard_topic(faas["topic"], cloud) + if "qos" in faas: + faas_ins.qos = int(faas["qos"]) + faas_ins.script = script + faas_ins.main = main + setattr(faas_ins.script, "__cloud__", faas_ins.cloud_name) + setattr(faas_ins.script, "__topic__", faas_ins.topic) + setattr(faas_ins.script, "__qos__", faas_ins.qos) + if cloud["type"] == CloudType.AliyunIoT: + if faas_ins.msg_type == 1: + self.ds1.aliyuniot_post_property_topic = faas_ins.topic + elif faas_ins.msg_type == 2: + self.ds1.aliyuniot_trigger_event_topic = faas_ins.topic + else: + if cloud["type"] == CloudType.AzureIoT: + if faas_ins.topic: + self.ds1.azureiot_send_message_topic = faas_ins.topic + faas_ins.add_subscribe_event(self.transport) + self.upload_event[faas["name"]] = faas_ins + + def add_download_faas_instance(self, faas, cloud): + script, main = self.verify_and_build_script(faas["name"], faas["funcName"], faas["script"]) + if not script: + return + else: + faas_ins = self.new_qfaas_instance(faas) + return faas_ins or None + faas_ins.cloud = cloud + faas_ins.cloud_name = faas["cloudName"] + faas_ins.topic = self.format_wildcard_topic(faas["topic"], cloud) + faas_ins.topics.add(faas_ins.topic) + if "qos" in faas: + faas_ins.qos = int(faas["qos"]) + faas_ins.script = script + faas_ins.main = main + if "payload_type" in faas: + faas_ins.payload_type = faas["payload_type"] + setattr(faas_ins.script, "__cloud__", faas_ins.cloud_name) + if cloud["type"] == CloudType.AWSIoT: + if faas_ins.msg_type == 1: + try: + shadow_name = faas_ins.topic.split("/")[2] + faas_ins.script.deviceshadow = DeviceShadow(shadow_name) + for topic in faas_ins.script.deviceshadow.subscribe_topics_of_remote: 
+ self.subscribe_topics_of_remote(topic, cloud_name=(faas["cloudName"])) + faas_ins.topics.add(topic) + + except Exception: + pass + + else: + if faas_ins.msg_type == 2: + pass + else: + if cloud["type"] == CloudType.AliyunIoT: + if faas_ins.msg_type == 1: + self.ds1.aliyuniot_service_pattern_topic = faas_ins.topic + else: + if faas_ins.msg_type == 2: + pass + elif faas_ins.msg_type == 3: + try: + product_key = faas_ins.topic.split("/")[2] + device_name = faas_ins.topic.split("/")[3] + faas_ins.script.aliyunshadow = ThingShadow(product_key, device_name) + for topic in faas_ins.script.aliyunshadow.subscribe_topics_of_remote: + self.subscribe_topics_of_remote(topic, cloud_name=(faas["cloudName"])) + faas_ins.topics.add(topic) + + except Exception: + pass + + else: + if cloud["type"] == CloudType.AzureIoT: + if faas_ins.msg_type == 1: + pass + if faas_ins.msg_type in (0, 1, 2, 3): + self.subscribe_topics_of_remote(faas_ins.topic, faas_ins.qos, faas["cloudName"]) + self.subscribe_remote_downstream_topic(faas["cloudName"]) + self.subcribe_event[faas["name"]] = faas_ins + + def subscribe_topics_of_remote(self, topic, qos=1, cloud_name='default'): + warp_payload = {'topic':topic, 'qos':qos} + json_warp_payload = json.dumps(warp_payload) + agent_service_id = AENTE_SERVICE_INFO_MAPS[cloud_name]["serviceId"] + quickfaas_service_id = get_quickfaas_service_id() + sub_topic = InternalTopic.EVENT_BUS_REMOTE_SUBSCRIBES.format(agentServiceId=agent_service_id, srcServiceId=quickfaas_service_id) + self.mqtt_agent_subscribe_topics[cloud_name]["sub_passage"] = sub_topic + self.mqtt_agent_subscribe_topics[cloud_name]["sub_topics"].append(json_warp_payload) + self.transport.publish(sub_topic, json_warp_payload) + + def subscribe_remote_downstream_topic(self, cloud_name='default'): + agent_service_id = AENTE_SERVICE_INFO_MAPS[cloud_name]["serviceId"] + quickfaas_service_id = get_quickfaas_service_id() + _topic = 
InternalTopic.EVENT_BUS_REMOTE_DOWNSTREAM_PUBLISH.format(agentServiceId=agent_service_id, srcServiceId=quickfaas_service_id) + if _topic not in self.subcribe_downstream_topics: + self.transport.subscribe(_topic) + self.subcribe_downstream_topics.append(_topic) + + def start_handler(self): + self.mqtt_connected_after() + self.transport.on_mqtt_connected_handler = self.mqtt_connected_after + self.transport.on_mqtt_message_received_handler = self._message_received_handler + + def get_azure_connection_string(self, connection_string, name): + args = connection_string.split(";") + for arg in args: + if arg.find(name) != -1: + return arg.split("=")[-1] + + return "" + + def format_wildcard_topic(self, topic, current_cloud=None): + for arg in get_global_args(): + raw_str = "${%s}" % arg["key"] + if raw_str in topic: + topic = topic.replace(raw_str, arg["value"]) + + if current_cloud is None: + return topic + if current_cloud.get("type") == AliyunIoT: + if "{ProductKey}" in topic: + topic = topic.replace("{ProductKey}", current_cloud.get("args").get("productKey")) + if "{DeviceName}" in topic: + topic = topic.replace("{DeviceName}", current_cloud.get("args").get("deviceName")) + elif current_cloud.get("type") == AzureIoT: + if "{DeviceId}" in topic: + if current_cloud.get("args").get("authType") == "Symmetric key": + topic = topic.replace("{DeviceId}", str(self.get_azure_connection_string(current_cloud.get("args").get("connectionString"), "DeviceId"))) + else: + topic = topic.replace("{DeviceId}", str(current_cloud.get("args").get("deviceID"))) + return topic + + def get_return_result(self, future): + future_id = id(future) + try: + result = future.result() + logger.debug("Result: %s" % str(result)) + except Exception as error: + try: + result = None + logger.warn("The script may run incorrectly(%s)" % error) + finally: + error = None + del error + + faas_ins = None + with self.thread_exec_result_lock: + if future_id in self.thread_exec_result: + faas_ins = 
self.thread_exec_result[future_id] + if result: + if faas_ins: + if faas_ins.msg_type == 1: + logger.debug("Aliyun thing_post_property") + thing_post_property((faas_ins.topic), result, cloud_name=(faas_ins.cloud_name)) + else: + if faas_ins.msg_type == 2: + logger.debug("Aliyun thing_trigger_event") + thing_trigger_event((faas_ins.topic), result, cloud_name=(faas_ins.cloud_name)) + else: + logger.debug("remotebus publish") + payload = result if isinstance(result, (str, bytes)) else json.dumps(result) + remotebus_publish((faas_ins.topic), payload, 1, cloud_name=(faas_ins.cloud_name)) + with self.thread_exec_result_lock: + if future_id in self.thread_exec_result: + del self.thread_exec_result[future_id] + with self.future_job_maps_lock: + if future_id in self.future_job_maps: + if faas_ins: + logger.debug("Task(%s) done, life time: %f seconds" % ( + faas_ins.name, time.time() - self.future_job_maps[future_id][0])) + del self.future_job_maps[future_id] + + def _ds2_convert_measure_to_ds1(self, measures): + """ + DS2 format: + { + 'timestamp': 1589434519, + 'timestampMsec': 1589434519123, + 'group': 'default', + 'measures': [ + { + 'ctrlName': 'Modbus', + 'name': 'measure1', + 'health': 1, + 'timestamp': 1620618445, + 'timestampMsec': 1589434519123, + 'value': 10 + }, + { + 'ctrlName': 'Modbus', + 'name': 'measure2', + 'health': 0, + 'timestamp': 1620618443, + 'timestampMsec': 1589434519123, + 'value': 0 + } + ] + } + + TO DS1 format: + { + 'timestamp': 1589434519, + 'group_name': 'default', + 'values': + { + 'Modbus': + { + 'measure1': + { + 'raw_data': 10, + 'timestamp': 1620618445, + 'status': 1 + }, + 'measure2': + { + 'raw_data': 0, + 'timestamp': 1620618443, + 'status': 0 + } + } + } + } + """ + ds1 = dict({'timestamp':measures["timestamp"], 'timestampMsec':measures["timestampMsec"], 'group_name':measures["group"], + 'values':dict()}) + for measure in measures["measures"]: + if measure["ctrlName"] not in ds1["values"]: + ds1["values"][measure["ctrlName"]] = 
dict() + ds1["values"][measure["ctrlName"]][measure["name"]] = {'raw_data':measure["value"], + 'timestamp':measure["timestamp"], 'status':measure["health"], 'timestampMsec':measure["timestampMsec"]} + + return ds1 + + def _find_uploadRule_config(self, uploadRules, ctrlName, measureName): + for uploadRule in uploadRules: + if uploadRule["measureName"] == measureName and uploadRule["ctrlName"] == ctrlName: + return uploadRule + + def _upload_measure_config(self, measures, uploadRules, hide_offline_data): + """ + { + 'timestamp': 1589434519, + 'timestampMsec': 1589434519123, + 'group': 'default', + 'measures': [ + { + 'ctrlName': 'Modbus', + 'name': 'measure1', + 'health': 1, + 'timestamp': 1620618445, + 'timestampMsec': 1620618445123, + 'value': 10 + }, + { + 'ctrlName': 'Modbus', + 'name': 'measure2', + 'health': 0, + 'timestamp': 1620618443, + 'timestampMsec': 1620618445123, + 'value': 0 + } + ] + } + + uploadRules: [ + { + "measureName": "measure1", + "ctrlName": "Modbus", + "hide": 0, + "uploadName": "my_test2" + } + ] + """ + upload_measure = {'timestamp':measures["timestamp"], + 'timestampMsec':measures["timestampMsec"], 'group':measures["group"], + 'measures':[]} + for measure in measures["measures"]: + if hide_offline_data: + if measure["health"] == 0: + continue + uploadRule = self._find_uploadRule_config(uploadRules, measure["ctrlName"], measure["name"]) + if uploadRule: + if uploadRule["hide"]: + continue + if len(uploadRule["uploadName"]): + measure["name"] = uploadRule["uploadName"] + upload_measure["measures"].append(measure) + + if len(upload_measure["measures"]) == 0: + return + return upload_measure + + def _ds2_convert_warning_to_ds1(self, topic, warnings): + """ + DS2 format: + { + "name": "alarm1", + "ctrlName": "Modbus", + "measureName": "measure1", + "priority": 1, + "timestamp": 1589434527, + "status": 1, + "value": 60, + "alarm_value": 60, + "content": "速度超过60" + } + + TO DS1 format: + { + 'timestamp': 1589434527.3628697, + 'group_name': 
    # NOTE(review): uncompyle6 could not reconstruct this method and emitted
    # its parse-error marker in place of the definition — the line below is
    # NOT valid Python and the original body is lost. It is called from
    # _message_received_handler as self.submit_job(faas_ins, remote_topic,
    # payload), and get_return_result / future_monitor expect it to populate
    # thread_exec_result and future_job_maps. Re-decompile the original
    # bytecode to restore it.
    def submit_jobParse error at or near `COME_FROM' instruction at offset 1096_0
+ FixMe:这个函数在paho-mqtt后台线程的上下文被调用 + """ + try: + payload = payload if not isinstance(payload, (str, bytes)) else Utilities.json_to_obj(payload) + except Exception: + payload = payload + + if topic_matches_sub(InternalTopic.EVENT_BUS_SYSTEM_CLOUD_CONN_NOICE, topic): + if payload["status"] != 2: + return + cloud_name = payload["cloud_name"] + for topic in self.mqtt_agent_subscribe_topics[cloud_name]["sub_topics"]: + self.transport.publish(self.mqtt_agent_subscribe_topics[cloud_name]["sub_passage"], topic) + + for cloud, cloud_config in AENTE_SERVICE_INFO_MAPS.items(): + if cloud_config["type"] == "SparkPlugB MQTT" and cloud_config["enable"] == 1 and get_status(cloud_config["name"]) and cloud_name == cloud: + with open_file_lock: + file_path = "/tmp/sparkplugb%s.txt" % cloud_name + try: + with open(file_path, "r", encoding="utf-8") as fd: + sparkPlugb_info_json = json.load(fd) + if "bdseq" in sparkPlugb_info_json: + bdseq = int(sparkPlugb_info_json["bdseq"]) + publish_nodeAndController_message(bdseq, cloud_name) + else: + topic = "ds2/eventbus/system/sparkplugb/rebirth" + mqtt_transport.publish(topic, cloud) + logger.error("cloud name %s will Reboot" % cloud) + return + except IOError: + topic = "ds2/eventbus/system/sparkplugb/rebirth" + mqtt_transport.publish(topic, cloud_name) + logger.error("cloud name %s will Reboot" % cloud_name) + return + + return + if topic_matches_sub(InternalTopic.EVENT_BUS_NORTH_CTRL_STATUS, topic): + CONTROLLERS_STATUS.clear() + for ctrl in payload: + CONTROLLERS_STATUS[ctrl["name"]] = ctrl + + for cloud, cloud_config in AENTE_SERVICE_INFO_MAPS.items(): + if cloud_config["type"] == "SparkPlugB MQTT" and cloud_config["enable"] == 1 and get_status(cloud_config["name"]): + file_path = "/tmp/sparkplugb%s.txt" % cloud_config["name"] + with open_file_lock: + try: + with open(file_path, "r", encoding="utf-8") as fd: + sparkPlugb_info_json = json.load(fd) + if "bdseq" in sparkPlugb_info_json: + return + seq = int(sparkPlugb_info_json["seq"]) + 
controller_health = sparkPlugb_info_json["controller_health"] + except IOError: + topic = "ds2/eventbus/system/sparkplugb/rebirth" + mqtt_transport.publish(topic, cloud_name) + logger.error("cloud name %s will Reboot" % cloud_name) + return + else: + for controller_info in payload: + if controller_info["name"] in controller_health: + if controller_info["health"] == controller_health[controller_info["name"]]: + continue + if controller_info["health"] == 1: + controller_health[controller_info["name"]] = 1 + seq = publish_device_online(seq, cloud, controller_info["timestamp"], controller_info["name"]) + else: + controller_health[controller_info["name"]] = 0 + publish_device_offline(seq, cloud, controller_info["timestamp"], controller_info["name"]) + seq += 1 + if seq > 255: + seq = 0 + else: + controller_health[controller_info["name"]] = controller_info["health"] + if controller_health[controller_info["name"]] == 1: + seq = publish_device_online(seq, cloud, controller_info["timestamp"], controller_info["name"]) + + with open(file_path, "w", encoding="utf-8") as fd: + sparkPlugb_info_dict = {'seq':seq, + 'controller_health':controller_health} + json.dump(sparkPlugb_info_dict, fd, indent=1, ensure_ascii=False) + + return + if topic_matches_sub(InternalTopic.EVENT_BUS_SYSTEM_SERVICE_STATUS_PING, topic): + self.on_sys_service_status_ping(topic, payload) + return + remote_topic = topic + if InternalTopic.EVENT_BUS_REMOTE_DOWNSTREAM_PUBLISH_PREFIX in topic: + remote_topic = payload["topic"] if "topic" in payload else topic + for faas_ins in chain(self.generic_event.values(), self.upload_event.values(), self.subcribe_event.values()): + if faas_ins.topic_matches_sub(remote_topic): + if isinstance(faas_ins, SubcribeEvent): + if int(topic.split("/")[-2]) != AENTE_SERVICE_INFO_MAPS[faas_ins.cloud_name]["serviceId"]: + continue + if isinstance(faas_ins, MeasureEvent): + if "cloud" in payload: + if payload["cloud"] != faas_ins.cloud_name: + continue + self.submit_job(faas_ins, 
class BaseEvent:
    """Common state shared by every FaaS trigger type.

    Subclasses override ``topic_matches_sub`` to decide whether an
    incoming MQTT topic should dispatch to this event.
    """

    def __init__(self, name):
        self.name = name
        # Trigger classification and cloud binding.
        self.type = None
        self.msg_type = 0
        self.cloud = None
        self.cloud_name = ""
        self.hide_offline_data = 0
        # Compiled user script and its entry function.
        self.func_name = None
        self.script = None
        self.main = None
        # Execution statistics.
        self.exec_count = 0
        self.lifetime = 0

    def topic_matches_sub(self, topic):
        """Base events never match any topic; subclasses override."""
        return False
class MeasureEvent(GeneralEvent):
    """Trigger fired by northbound measure uploads for configured groups."""

    def __init__(self, name):
        super().__init__(name)
        # Topic templates for periodic and on-change measure publications.
        self.event_bus_topic = [InternalTopic.EVENT_BUS_NORTH_MEASURES, InternalTopic.EVENT_BUS_NORTH_MEASURES_ONCHANGE]
        self.groups = list()  # group names this event listens to
        self.topics = set()   # concrete topics subscribed so far

    def topic_matches_sub(self, topic):
        """Return True when *topic* matches any subscribed measure topic."""
        return any(topic_matches_sub(candidate, topic) for candidate in self.topics)

    def matches_rule_name(self, name):
        """Return True when *name* is one of the configured groups."""
        return name in self.groups

    def add_subscribe_event(self, transport):
        """Subscribe to both measure topics of every configured group."""
        for group_name in self.groups:
            for template in self.event_bus_topic:
                concrete = template.format(groupName=group_name)
                transport.subscribe(concrete)
                self.topics.add(concrete)
def faas_write_file(filePath, mode='w', data='', encode='utf-8'):
    """Write ``str(data)`` to *filePath* while holding an exclusive
    advisory lock (``fcntl.flock``).

    Args:
        filePath: destination path.
        mode: open mode (default ``'w'``).
        data: payload; converted with ``str()`` before writing.
        encode: text encoding (default ``'utf-8'``).

    Raises:
        ValueError: wrapping any underlying error, after logging it.
    """
    try:
        with open(filePath, mode, encoding=encode) as handle:
            fcntl.flock(handle, fcntl.LOCK_EX)
            handle.write(str(data))
            fcntl.flock(handle, fcntl.LOCK_UN)
    except Exception as exc:
        logger.error("Exception: %s" % exc)
        raise ValueError(exc)
encode='utf-8', size=-1): + try: + data = None + with open(filePath, mode, encoding=encode) as f: + fcntl.flock(f, fcntl.LOCK_SH) + data = f.read(int(size)) + fcntl.flock(f, fcntl.LOCK_UN) + return data + except Exception as e: + try: + logger.error("Exception: %s" % e) + raise ValueError(e) + finally: + e = None + del e diff --git a/APPS_UNCOMPILED/src/quickfaas/global_dict.py b/APPS_UNCOMPILED/src/quickfaas/global_dict.py new file mode 100644 index 0000000..c3a3e09 --- /dev/null +++ b/APPS_UNCOMPILED/src/quickfaas/global_dict.py @@ -0,0 +1,29 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/quickfaas/global_dict.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 500 bytes +""" +Drivers +Created on 2021/1/5 +@author: Lius +""" +GLOBAL_ARGS_LIST = list() + +def _set_global_args(args): + global GLOBAL_ARGS_LIST + GLOBAL_ARGS_LIST = args + + +def get(): + return GLOBAL_ARGS_LIST + + +def get_global_parameter(): + global_parameter = dict() + for parameter in GLOBAL_ARGS_LIST: + global_parameter[parameter["key"]] = parameter["value"] + + return global_parameter diff --git a/APPS_UNCOMPILED/src/quickfaas/measure.py b/APPS_UNCOMPILED/src/quickfaas/measure.py new file mode 100644 index 0000000..1ae490f --- /dev/null +++ b/APPS_UNCOMPILED/src/quickfaas/measure.py @@ -0,0 +1,260 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/quickfaas/measure.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 12132 bytes +""" +Drivers +Created on 2021/1/12 +@author: Lius +""" +import json, uuid +from quickfaas.config import get +from .transport import mqtt_transport +from 
.service_id import AENTE_SERVICE_INFO_MAPS +import common.InternalTopic as InternalTopic + +def recall(names=None, recall_type='measure', timeout=10, realTime=False): + if not names: + names = list() + else: + task_id = str(uuid.uuid1()) + result = dict() + if recall_type == "group": + wrap_payload = {'recallType':"group", + 'groups':names} + mqtt_transport.publish(InternalTopic.EVENT_BUS_NORTH_RECALL_MEASURES, json.dumps(wrap_payload)) + else: + if realTime: + wrap_payload = { + 'task_id': task_id, 'recallType': '"realTime"', + 'timeout': timeout, 'controllers': names} + else: + wrap_payload = {'task_id':task_id, + 'recallType':"measure", 'controllers':names} + result = mqtt_transport.sync_publish_message((InternalTopic.EVENT_BUS_NORTH_RECALL_MEASURES), (json.dumps(wrap_payload)), + event_id=task_id, timeout=timeout) + if result: + return result["controllers"] + return result + + +def recall2(names=None, recall_type='measure', callback=None, userdata=None, timeout=10, realTime=False): + userargs = dict() + if not isinstance(names, list): + names = list() + elif recall_type == "group": + wrap_payload = {'recallType':"group", + 'groups':names} + mqtt_transport.publish(InternalTopic.EVENT_BUS_NORTH_RECALL_MEASURES, json.dumps(wrap_payload)) + else: + task_id = str(uuid.uuid1()) + if realTime: + wrap_payload = { + 'task_id': task_id, 'recallType': '"realTime"', 'timeout': timeout, + 'controllers': names} + else: + wrap_payload = {'task_id':task_id, + 'recallType':"measure", 'controllers':names} + userargs["type"] = "recall2" + userargs["callback"] = callback + userargs["userdata"] = userdata + response_topic = InternalTopic.EVENT_BUS_NORTH_RECALL_MEASURES + "/response" + mqtt_transport.async_publish_message(InternalTopic.EVENT_BUS_NORTH_RECALL_MEASURES, json.dumps(wrap_payload), 0, task_id, timeout, response_topic, userargs) + + +def is_not_controller_write_format(message): + if not isinstance(message, dict): + return False + for _, value in message.items(): + if 
isinstance(value, dict): + return False + return True + + +def find_vaild_controller_by_measure(measure_name): + controller_name = None + measures_config = get()["measures"] + for measure in measures_config: + if measure["name"] == measure_name: + if controller_name is None: + controller_name = measure["ctrlName"] + else: + raise ValueError("Controller(%s and %s) both have this measure(%s)" % (controller_name, measure["ctrlName"], measure_name)) + + return controller_name + + +def find_controller_in_message(controllers, controller_name): + if not isinstance(controllers, list): + return + for controller in controllers: + if controller["name"] == controller_name: + return controller + + +def _find_uploadRule_config(uploadRules, ctrlName, measureName): + for uploadRule in uploadRules: + if not uploadRule["ctrlName"] == ctrlName or uploadRule["measureName"] == measureName or uploadRule["uploadName"] == measureName: + return uploadRule + + +def write(message, timeout=60, cloudName=None): + task_id = str(uuid.uuid1()) + if isinstance(message, dict): + controllers = list() + if is_not_controller_write_format(message): + for measure_name, measure_value in message.items(): + con_name = find_vaild_controller_by_measure(measure_name) + if con_name is None: + raise ValueError("Unknown measure(%s) belong to any one of the controllers" % measure_name) + measures = {'name':measure_name, 'value':measure_value} + controller = find_controller_in_message(controllers, con_name) + if controller is None: + controller = dict() + controller["name"] = con_name + controller["measures"] = list() + controller["measures"].append(measures) + controllers.append(controller) + else: + controller["measures"].append(measures) + + else: + for con_name, measures in message.items(): + controller = dict() + controller["name"] = con_name + controller["measures"] = list() + for measure_name, measure_value in measures.items(): + measures = dict() + measures["name"] = measure_name + measures["value"] = 
measure_value + controller["measures"].append(measures) + + controllers.append(controller) + + wrap_payload = {'task_id':task_id, + 'controllers':controllers} + else: + wrap_payload = {'task_id':task_id, 'controllers':message} + if cloudName: + write_message = {'task_id':wrap_payload["task_id"], + 'controllers':[]} + for controller in wrap_payload["controllers"]: + controller_message = {'name':controller["name"], + 'measures':[]} + for measure in controller["measures"]: + uploadRule = _find_uploadRule_config(AENTE_SERVICE_INFO_MAPS[cloudName]["uploadRules"], controller["name"], measure["name"]) + if uploadRule: + if uploadRule["hide"]: + continue + if measure["name"] == uploadRule["uploadName"]: + measure["name"] = uploadRule["measureName"] + controller_message["measures"].append(measure) + + if len(controller_message["measures"]): + write_message["controllers"].append(controller_message) + + else: + write_message = wrap_payload + if len(write_message["controllers"]) == 0: + return + write_response = mqtt_transport.sync_publish_message((InternalTopic.EVENT_BUS_NORTH_WRITE_MEASURES), + (json.dumps(write_message)), event_id=task_id, timeout=timeout) + if write_response is None: + return ('error', -110, 'timeout') + if isinstance(message, dict): + ds1_response = list() + for controller in write_response["controllers"]: + for measure in controller["measures"]: + if cloudName: + uploadRule = _find_uploadRule_config(AENTE_SERVICE_INFO_MAPS[cloudName]["uploadRules"], controller["name"], measure["name"]) + if uploadRule: + if len(uploadRule["uploadName"]): + measure["name"] = uploadRule["uploadName"] + result = "OK" if measure["error_code"] == 0 else "Failed" + var = dict({'value':measure["value"], 'device':controller["name"], 'var_name':measure["name"], 'result':result, + 'error':measure["error_reason"]}) + ds1_response.append(var) + + return ds1_response + for controller in write_response["controllers"]: + for measure in controller["measures"]: + if cloudName: + 
uploadRule = _find_uploadRule_config(AENTE_SERVICE_INFO_MAPS[cloudName]["uploadRules"], controller["name"], measure["name"]) + if uploadRule and len(uploadRule["uploadName"]): + measure["name"] = uploadRule["uploadName"] + + return write_response["controllers"] + + +def write_plc_values(message, callback=None, userdata=None, timeout=60, cloudName=None): + task_id = str(uuid.uuid1()) + userargs = dict() + if isinstance(message, dict): + controllers = list() + if is_not_controller_write_format(message): + userargs["format"] = 1 + for measure_name, measure_value in message.items(): + con_name = find_vaild_controller_by_measure(measure_name) + if con_name is None: + return + measures = {'name':measure_name, 'value':measure_value} + controller = find_controller_in_message(controllers, con_name) + if controller is None: + controller = dict() + controller["name"] = con_name + controller["measures"] = list() + controller["measures"].append(measures) + controllers.append(controller) + else: + controller["measures"].append(measures) + + else: + userargs["format"] = 2 + for con_name, measures in message.items(): + controller = dict() + controller["name"] = con_name + controller["measures"] = list() + for measure_name, measure_value in measures.items(): + measures = dict() + measures["name"] = measure_name + measures["value"] = measure_value + controller["measures"].append(measures) + + controllers.append(controller) + + wrap_payload = {'task_id':task_id, + 'controllers':controllers} + else: + userargs["format"] = 3 + wrap_payload = {'task_id':task_id, 'controllers':message} + if cloudName: + write_message = {'task_id':wrap_payload["task_id"], + 'controllers':[]} + for controller in wrap_payload["controllers"]: + controller_message = {'name':controller["name"], + 'measures':[]} + for measure in controller["measures"]: + uploadRule = _find_uploadRule_config(AENTE_SERVICE_INFO_MAPS[cloudName]["uploadRules"], controller["name"], measure["name"]) + if uploadRule: + if 
uploadRule["hide"]: + continue + if measure["name"] == uploadRule["uploadName"]: + measure["name"] = uploadRule["measureName"] + controller_message["measures"].append(measure) + + if len(controller_message["measures"]): + write_message["controllers"].append(controller_message) + + else: + write_message = wrap_payload + if len(write_message["controllers"]) == 0: + return + userargs["type"] = "write_plc_values" + userargs["callback"] = callback + userargs["userdata"] = userdata + userargs["cloudName"] = cloudName + response_topic = InternalTopic.EVENT_BUS_NORTH_WRITE_MEASURES + "/response" + mqtt_transport.async_publish_message(InternalTopic.EVENT_BUS_NORTH_WRITE_MEASURES, json.dumps(write_message), 0, task_id, timeout, response_topic, userargs) diff --git a/APPS_UNCOMPILED/src/quickfaas/messagebus.py b/APPS_UNCOMPILED/src/quickfaas/messagebus.py new file mode 100644 index 0000000..85b91f6 --- /dev/null +++ b/APPS_UNCOMPILED/src/quickfaas/messagebus.py @@ -0,0 +1,19 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/quickfaas/messagebus.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 516 bytes +""" +Drivers +Created on 2021/1/5 +@author: Lius +""" +import json +from .transport import mqtt_transport + +def publish(topic, payload, qos=0, timeout=60): + if not isinstance(payload, (str, bytes)): + payload = json.dumps(payload) + mqtt_transport.publish(topic, payload) diff --git a/APPS_UNCOMPILED/src/quickfaas/mqttSparkPlugB.py b/APPS_UNCOMPILED/src/quickfaas/mqttSparkPlugB.py new file mode 100644 index 0000000..75c08c6 --- /dev/null +++ b/APPS_UNCOMPILED/src/quickfaas/mqttSparkPlugB.py @@ -0,0 +1,327 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# 
Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/quickfaas/mqttSparkPlugB.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 12335 bytes +import json, sparkPlugB_pb2, re, time +from common.Logger import logger +from .remotebus import publish +from .controller import get_controller_status +from .clouds import get_status +from .transport import mqtt_transport +from quickfaas.global_dict import get_global_parameter +from threading import Lock +open_file_lock = Lock() +gl_config = {} + +def datatype2int(measure): + dataType = measure["dataType"] + if measure["transformType"]: + return 10 + if dataType == "BIT": + return 11 + if dataType == "BYTE": + return 5 + if dataType == "SINT": + return 1 + if dataType == "WORD": + return 6 + if dataType == "INT": + return 2 + if dataType == "DWORD": + return 7 + if dataType == "DINT": + return 3 + if dataType == "FLOAT": + return 9 + if dataType == "STRING": + return 12 + if dataType == "BCD": + return 13 + if dataType == "BCD32": + return 14 + if dataType == "ULONG": + return 8 + if dataType == "LONG": + return 4 + if dataType == "DOUBLE": + return 10 + return 0 + + +def metric_value_type(metric, value): + if value == "": + if not metric.datatype == 5: + if metric.datatype == 6 or metric.datatype == 7: + metric.uint_value = 0 + elif not metric.datatype == 1: + if metric.datatype == 2 or metric.datatype == 3: + metric.int_value = 0 + elif metric.datatype == 13: + metric.bcd_value = 0 + else: + if metric.datatype == 14: + metric.bcd32_value = 0 + else: + if metric.datatype == 4: + metric.long_value = 0 + else: + if metric.datatype == 8: + metric.ulong_value = 0 + else: + if metric.datatype == 9: + metric.float_value = 0 + else: + if metric.datatype == 10: + metric.double_value = 0 + else: + if metric.datatype == 11: + metric.boolean_value = 0 + else: + if metric.datatype == 12: + metric.string_value = "0" + else: + if not metric.datatype == 5: + if metric.datatype == 6 or 
metric.datatype == 7: + metric.uint_value = value + else: + if metric.datatype == 1 or metric.datatype == 2 or metric.datatype == 3: + metric.int_value = value + if metric.datatype == 13: + metric.bcd_value = value + else: + if metric.datatype == 14: + metric.bcd32_value = value + else: + if metric.datatype == 4: + metric.long_value = value + else: + if metric.datatype == 8: + metric.ulong_value = value + else: + if metric.datatype == 9: + metric.float_value = value + else: + if metric.datatype == 10: + metric.double_value = value + else: + if metric.datatype == 11: + metric.boolean_value = value + else: + if metric.datatype == 12: + metric.string_value = value + + +def find_cloud_config(cloud_name): + global gl_config + for cloud in gl_config["clouds"]: + if cloud["name"] == cloud_name: + return cloud + + +def publish_controller_data(seq, controller_health, message, cloud_name): + parameter = get_global_parameter() + for ctrlName in message["values"]: + if controller_health[ctrlName] == 0: + continue + cloud = find_cloud_config(cloud_name) + topic = "spBv1.0/" + cloud["args"]["groupId"] + "/" + "DDATA" + "/" + parameter["SN"] + "/" + ctrlName + payload = sparkPlugB_pb2.Payload() + payload.timestamp = message["timestamp"] + for measureName in message["values"][ctrlName]: + for measure in gl_config["measures"]: + if measureName == measure["name"] and ctrlName == measure["ctrlName"]: + metric = payload.metrics.add() + metric.datatype = datatype2int(measure) + metric_value_type(metric, message["values"][ctrlName][measureName]["raw_data"]) + metric.name = ctrlName + "/" + measureName + metric.timestamp = message["values"][ctrlName][measureName]["timestamp"] + break + + payload.seq = seq + seq += 1 + if seq > 255: + seq = 0 + logger.debug("device data, topic is %s" % topic) + publish(topic, payload.SerializeToString(), 0, None, cloud_name) + + return seq + + +def publish_device_online(seq, cloud_name, timestamp, controller): + pub_device_online_flag = 0 + parameter = 
get_global_parameter() + cloud = find_cloud_config(cloud_name) + topic = "spBv1.0/" + cloud["args"]["groupId"] + "/" + "DBIRTH" + "/" + parameter["SN"] + "/" + controller + payload = sparkPlugB_pb2.Payload() + payload.timestamp = timestamp + for measure in gl_config["measures"]: + if controller == measure["ctrlName"]: + pub_device_online_flag = 1 + metric = payload.metrics.add() + metric.name = controller + "/" + measure["name"] + metric.datatype = datatype2int(measure) + metric_value_type(metric, "") + + if pub_device_online_flag == 0: + return + payload.seq = seq + seq += 1 + if seq > 255: + seq = 0 + logger.debug(" device online, topic is %s" % topic) + publish(topic, payload.SerializeToString(), 0, None, cloud_name) + return seq + + +def publish_device_offline(seq, cloud_name, timestamp, controller): + parameter = get_global_parameter() + cloud = find_cloud_config(cloud_name) + topic = "spBv1.0/" + cloud["args"]["groupId"] + "/" + "DDEATH" + "/" + parameter["SN"] + "/" + controller + payload = sparkPlugB_pb2.Payload() + payload.timestamp = timestamp + payload.seq = seq + logger.debug(" device offline, topic is %s" % topic) + publish(topic, payload.SerializeToString(), 0, None, cloud_name) + + +def publish_controller_status(seq, controller_health, cloud_name): + controller_status_value = get_controller_status() + for ctrlName, ctrlHealth in controller_status_value.items(): + controller_health[ctrlName] = controller_status_value[ctrlName]["health"] + if controller_health[ctrlName] == 1: + seq = publish_device_online(seq, cloud_name, int(time.time()), ctrlName) + + return ( + seq, controller_health) + + +def publish_node_online(bdseq, cloud_name): + parameter = get_global_parameter() + payload = sparkPlugB_pb2.Payload() + payload.timestamp = int(time.time()) + cloud = find_cloud_config(cloud_name) + topic = "spBv1.0/" + cloud["args"]["groupId"] + "/" + "NBIRTH" + "/" + parameter["SN"] + metric_reboot = payload.metrics.add() + metric_reboot.name = "Node 
Control/Rebirth" + metric_reboot.timestamp = int(time.time()) + metric_reboot.datatype = 11 + metric_reboot.boolean_value = False + payload.seq = 0 + payload.bdseq = bdseq + logger.debug("node online topic is %s" % topic) + publish(topic, payload.SerializeToString(), 0, None, cloud_name) + + +def publish_nodeAndController_message(bdseq, cloud_name): + global gl_config + config_file_path = "/var/user/cfg/device_supervisor/device_supervisor.cfg" + with open(config_file_path, "r", encoding="utf-8") as config_fd: + gl_config = json.load(config_fd) + publish_node_online(bdseq, cloud_name) + file_path = "/tmp/sparkplugb%s.txt" % cloud_name + seq, controller_health = publish_controller_status(1, {}, cloud_name) + with open(file_path, "w", encoding="utf-8") as fd: + sparkPlugb_info_dict = {'seq':seq, + 'controller_health':controller_health} + json.dump(sparkPlugb_info_dict, fd, indent=1, ensure_ascii=False) + + +def SparkPlugB_publish(message, cloud_name='default'): + with open_file_lock: + if get_status(cloud_name): + file_path = "/tmp/sparkplugb%s.txt" % cloud_name + try: + with open(file_path, "r", encoding="utf-8") as fd: + sparkPlugb_info_json = json.load(fd) + if "bdseq" in sparkPlugb_info_json: + return + seq = int(sparkPlugb_info_json["seq"]) + controller_health = sparkPlugb_info_json["controller_health"] + seq = publish_controller_data(seq, controller_health, message, cloud_name) + except IOError: + topic = "ds2/eventbus/system/sparkplugb/rebirth" + mqtt_transport.publish(topic, cloud_name) + logger.error("cloud name %s will Reboot" % cloud_name) + return + else: + with open(file_path, "w", encoding="utf-8") as fd: + sparkPlugb_info_dict = {'seq':seq, + 'controller_health':controller_health} + json.dump(sparkPlugb_info_dict, fd, indent=1, ensure_ascii=False) + + +def add_write_request(write_request, name, value): + names = name.split("/") + for controller in write_request: + if controller["name"] == names[0]: + for measure in controller["measures"]: + if 
measure["name"] == names[1]: + return write_request + + controller["measures"].append({'name':names[1], 'value':value}) + return write_request + + write_request.append({'name':names[0], 'measures':[{'name':names[1], 'value':value}]}) + return write_request + + +def SparkPlugB_parse(message): + write_request = [] + Payload = sparkPlugB_pb2.Payload() + Payload.ParseFromString(message) + metrics = re.findall("metrics {\\n(.*?)\\n}", str(Payload), re.DOTALL) + for metric in metrics: + name = re.search('name: "(.*?)"', metric).group(1) + datatype = int(re.search("datatype: (\\d+)", metric).group(1)) + if datatype in (1, 2, 3): + value = int(re.search("int_value: (-?\\d+)", metric).group(1)) + else: + if datatype in (5, 6, 7): + value = int(re.search("uint_value: (\\d+)", metric).group(1)) + else: + if datatype == 4: + value = int(re.search("long_value: (-?\\d+)", metric).group(1)) + else: + if datatype == 8: + value = int(re.search("ulong_value: (\\d+)", metric).group(1)) + else: + if datatype == 9: + value = float(re.search("float_value: (\\d+\\.\\d+)", metric).group(1)) + else: + if datatype == 10: + value = float(re.search("double_value: (\\d+\\.\\d+)", metric).group(1)) + else: + if datatype == 11: + value = re.search("boolean_value: (true|false)", metric).group(1) == "true" + else: + if datatype == 12: + value = re.search("str_value: (\\w+)", metric).group(1) + else: + if datatype == 13: + value = int(re.search("bcd_value: (\\d+)", metric).group(1)) + else: + if datatype == 14: + value = int(re.search("bcd32_value: (\\d+)", metric).group(1)) + else: + value = None + write_request = add_write_request(write_request, name, value) + + return write_request + + +def node_cmd_handler(message, cloud_name='default'): + topic = "ds2/eventbus/system/sparkplugb/rebirth" + Payload = sparkPlugB_pb2.Payload() + Payload.ParseFromString(message) + metrics = re.findall("metrics {\\n(.*?)\\n}", str(Payload), re.DOTALL) + for metric in metrics: + name = re.search('name: "(.*?)"', 
metric).group(1) + datatype = int(re.search("datatype: (\\d+)", metric).group(1)) + if datatype == 11 and name == "Node Control/Rebirth": + value = re.search("boolean_value: (true|false)", metric).group(1) == "true" + if value: + mqtt_transport.publish(topic, cloud_name) + logger.debug("cloud name %s will Rebirth" % cloud_name) diff --git a/APPS_UNCOMPILED/src/quickfaas/packet_handler.py b/APPS_UNCOMPILED/src/quickfaas/packet_handler.py new file mode 100644 index 0000000..f510f3d --- /dev/null +++ b/APPS_UNCOMPILED/src/quickfaas/packet_handler.py @@ -0,0 +1,45 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/quickfaas/packet_handler.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 1681 bytes +""" +Drivers +Created on 2021/2/4 +@author: Lius +""" +import json, base64 +from common.Logger import logger + +def wrap_upstream_payload(topic, payload, qos=1, wizard_data=None): + if wizard_data: + if isinstance(payload, dict): + wizard_data = dict() + elif "topic" not in wizard_data: + logger.warn("wizard_data not found topic, set default topic:%s" % topic) + wizard_data["topic"] = topic + else: + if "qos" not in wizard_data: + logger.warn("wizard_data not found qos, set default qos:%s" % qos) + wizard_data["qos"] = qos + else: + wizard_data["qos"] = int(wizard_data["qos"]) + if "payload" not in wizard_data: + logger.warn("wizard_data not found payload, set default payload:%s" % payload) + wizard_data["payload"] = str(base64.b64encode(payload), "utf-8") + else: + if isinstance(wizard_data["payload"], bytes): + wizard_data["payload"] = str(base64.b64encode(wizard_data["payload"]), "utf-8") + else: + if isinstance(wizard_data["payload"], str): + wizard_data["payload"] = str(base64.b64encode(wizard_data["payload"].encode("utf-8")), "utf-8") + else: + 
wizard_data["payload"] = str(base64.b64encode(json.dumps(wizard_data["payload"]).encode("utf-8")), "utf-8") + elif isinstance(payload, bytes): + upload_payload = str(base64.b64encode(payload), "utf-8") + else: + upload_payload = str(base64.b64encode(payload.encode("utf-8")), "utf-8") + return { + 'topic': topic, 'qos': qos, 'payload': upload_payload, 'wizard_data': wizard_data} diff --git a/APPS_UNCOMPILED/src/quickfaas/quick_function.py b/APPS_UNCOMPILED/src/quickfaas/quick_function.py new file mode 100644 index 0000000..42f51ff --- /dev/null +++ b/APPS_UNCOMPILED/src/quickfaas/quick_function.py @@ -0,0 +1,126 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/quickfaas/quick_function.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 5655 bytes +""" +Drivers +Created on 2021/1/3 +@author: Lius +""" +import os, json, time, queue, signal, logging +from common.Logger import logger +from .service_id import SERVICE_ID +from .controller import CONTROLLERS_STATUS +from .transport import mqtt_transport +from .faas_handler import FaaSHandler +from .global_dict import _set_global_args +from .LwTimer import LwTimerSched +from concurrent.futures import ThreadPoolExecutor +controllers_status_file = "/var/run/python/ds2/controllers_status.json" +quickFaas_status_file = "/var/run/python/ds2/quickfaas_status.json" + +class BoundThreadPoolExecutor(ThreadPoolExecutor): + + def __init__(self, *args, **kwargs): + (super(BoundThreadPoolExecutor, self).__init__)(*args, **kwargs) + self._work_queue = queue.Queue(100) + + +class QuickFunction: + + def __init__(self, service_id, config_file): + self.service_id = service_id + self.config_file = config_file + self.transport = mqtt_transport + SERVICE_ID["service_id"] = service_id + self.thread_pool = 
BoundThreadPoolExecutor(max_workers=3) + self.scheduler = LwTimerSched(self.thread_pool) + self.transport.scheduler = self.scheduler + self.faashandler = FaaSHandler(self.scheduler, self.transport, self.thread_pool) + self._QuickFunction__register_signal() + + def __register_signal(self): + signal.signal(signal.SIGINT, self._QuickFunction__signal_handler) + signal.signal(signal.SIGTERM, self._QuickFunction__signal_handler) + + def __signal_handler(self, signalnum, handler): + logger.info("signal. signalnum: {}".format(signalnum)) + if self.faashandler: + self.faashandler.shutdown(wait=True) + self.faashandler = None + if self.scheduler: + self.scheduler.shutdown(wait=True) + self.scheduler = None + logger.info("QuickFaaS exit...") + os.system("rm -fr " + quickFaas_status_file) + + def _config_loader(self): + logger.info("Loaded config files: {}".format(self.config_file)) + config_file = os.path.dirname(self.config_file) + "/QuickFaaS.json" + if os.path.exists(self.config_file): + os.rename(self.config_file, config_file) + with open(config_file, "r", encoding="utf-8-sig") as load_fd: + load_cfg = json.load(load_fd) + return load_cfg + + def _find_cloud_config(self, clouds, cloud_name): + for cloud in clouds: + if cloud["name"] == cloud_name: + return cloud + + def _run_faas(self, cfg): + _set_global_args(cfg["labels"]) + upper_level = cfg["misc"]["logLvl"].upper() + if upper_level == "DEBUG": + logger.set_level(logging.DEBUG) + else: + if upper_level == "INFO": + logger.set_level(logging.INFO) + else: + if upper_level == "WARN" or upper_level == "WARNING": + logger.set_level(logging.WARN) + else: + if upper_level == "ERROR": + logger.set_level(logging.ERROR) + else: + try: + with open(controllers_status_file, "r", encoding="utf-8-sig") as load_fd: + controllers = json.load(load_fd) + except Exception: + controllers = list() + + for controller in controllers: + CONTROLLERS_STATUS[controller["name"]] = controller + + self.faashandler.update_clouds_info(cfg["clouds"]) 
+ self.faashandler.start_handler() + quickfaas = cfg["quickfaas"] + for faas in quickfaas["genericFuncs"]: + logger.info("genericFuncs") + self.faashandler.add_generic_faas_instance(faas) + + for faas in quickfaas["uploadFuncs"]: + cloud = self._find_cloud_config(cfg["clouds"], faas["cloudName"]) + if not cloud is None: + if cloud["enable"] == 0: + continue + logger.info("uploadFuncs") + self.faashandler.add_upload_faas_instance(faas, cloud) + + for faas in quickfaas["downloadFuncs"]: + cloud = self._find_cloud_config(cfg["clouds"], faas["cloudName"]) + if not cloud is None: + if cloud["enable"] == 0: + continue + logger.info("downloadFuncs") + self.faashandler.add_download_faas_instance(faas, cloud) + + quickFaas_status_file_path = os.path.dirname(quickFaas_status_file) + if not os.path.exists(quickFaas_status_file_path): + os.system("mkdir -p " + quickFaas_status_file_path) + with open(quickFaas_status_file, "w", encoding="utf-8") as f: + f.write(json.dumps({'status':1, 'timestamp':int(time.time())})) + self.scheduler.start() diff --git a/APPS_UNCOMPILED/src/quickfaas/remotebus.py b/APPS_UNCOMPILED/src/quickfaas/remotebus.py new file mode 100644 index 0000000..41d1f39 --- /dev/null +++ b/APPS_UNCOMPILED/src/quickfaas/remotebus.py @@ -0,0 +1,33 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/quickfaas/remotebus.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 774 bytes +""" +Drivers +Created on 2021/1/7 +@author: Lius +""" +import json +from common.Logger import logger +from .transport import mqtt_transport +from .packet_handler import wrap_upstream_payload +from .service_id import get_upstream_publish_topic_by_name + +def publish(topic, payload, qos=1, wizard_data=None, cloud_name='default'): + try: + if not isinstance(payload, (str, bytes)): + payload = 
json.dumps(payload) + wrap_payload = wrap_upstream_payload(topic, payload, qos, wizard_data) + mqtt_transport.publish(get_upstream_publish_topic_by_name(cloud_name), json.dumps(wrap_payload)) + return True + except Exception as e: + try: + logger.error("Publish message failed(%s)" % e) + finally: + e = None + del e + + return False diff --git a/APPS_UNCOMPILED/src/quickfaas/service_id.py b/APPS_UNCOMPILED/src/quickfaas/service_id.py new file mode 100644 index 0000000..04e51d2 --- /dev/null +++ b/APPS_UNCOMPILED/src/quickfaas/service_id.py @@ -0,0 +1,38 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/quickfaas/service_id.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 874 bytes +""" +Drivers +Created on 2021/1/20 +@author: Lius +""" +from common.Logger import logger +import common.InternalTopic as InternalTopic +SERVICE_ID = {"service_id": 1011} +AENTE_SERVICE_INFO_MAPS = dict() + +def get_quickfaas_service_id(): + global SERVICE_ID + return SERVICE_ID["service_id"] + + +def get_agnet_service_id(cloud_name): + global AENTE_SERVICE_INFO_MAPS + try: + return AENTE_SERVICE_INFO_MAPS[cloud_name]["serviceId"] + except Exception as e: + try: + logger.error("Get cloud connection status error. 
%s" % e) + finally: + e = None + del e + + +def get_upstream_publish_topic_by_name(cloud_name): + service_id = get_agnet_service_id(cloud_name) + src_service_id = get_quickfaas_service_id() + return InternalTopic.EVENT_BUS_REMOTE_UPSTREAM_PUBLISH.format(agentServiceId=service_id, srcServiceId=src_service_id) diff --git a/APPS_UNCOMPILED/src/quickfaas/transport.py b/APPS_UNCOMPILED/src/quickfaas/transport.py new file mode 100644 index 0000000..ec22a99 --- /dev/null +++ b/APPS_UNCOMPILED/src/quickfaas/transport.py @@ -0,0 +1,525 @@ +# uncompyle6 version 3.9.2 +# Python bytecode version base 3.7.0 (3394) +# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52) +# [Clang 14.0.6 ] +# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/src/quickfaas/transport.py +# Compiled at: 2024-04-18 03:12:55 +# Size of source mod 2**32: 24881 bytes +""" +Drivers +Created on 2021/1/5 +@author: Lius +""" +import os, time, random, string, uuid, base64, weakref, threading, traceback +import paho.mqtt.client as mqtt +from common.Logger import logger +import common.Utilities as Utilities +from common.LocalSetting import get_port +import common.InternalTopic as InternalTopic +from common.InternalPath import EC_SYSTEM_INFO +from .service_id import AENTE_SERVICE_INFO_MAPS + +class MQTTTransport(object): + _instance_lock = threading.Lock() + + def __init__(self): + self.thread_pool = None + self.scheduler = None + self.ds1 = None + self._client_id = "quickfaas@" + "".join(random.sample(string.ascii_letters + string.digits, 8)) + self._on_connected_lock = threading.Lock() + self.on_mqtt_connected_handler = None + self._on_disconnected_lock = threading.Lock() + self.on_mqtt_disconnected_handler = None + self._on_message_lock = threading.Lock() + self.on_mqtt_message_received_handler = None + self._mqtt_publish_lock = threading.Lock() + self._establish_event_resp_topics = list() + self.sync_establish_event = dict() + self.sync_establish_event_lock = 
threading.Lock() + self.async_establish_event = dict() + self.async_establish_event_lock = threading.Lock() + self._mqtt_client = self._create_mqtt_client() + self.connect() + + def __new__(cls, *args, **kwargs): + with MQTTTransport._instance_lock: + if not hasattr(cls, "_instance"): + MQTTTransport._instance = super().__new__(cls) + return MQTTTransport._instance + + def _create_mqtt_client(self): + """ + Create the MQTT client object and assign all necessary event handler callbacks. + """ + logger.info("Creating client for connecting using MQTT over TCP") + mqtt_client = mqtt.Client(client_id=(self._client_id), clean_session=False, protocol=(mqtt.MQTTv311)) + self_weakref = weakref.ref(self) + + def on_connect(client, userdata, flags, rc): + this = self_weakref() + logger.debug("connected with result code: {}".format(rc)) + with self._on_connected_lock: + if rc: + logger.warning("connection failed.") + else: + if this.on_mqtt_connected_handler: + try: + this.on_mqtt_connected_handler() + except Exception: + logger.error("Unexpected error calling on_mqtt_connected_handler") + logger.error(traceback.format_exc()) + + def on_disconnect(client, userdata, rc): + this = self_weakref() + logger.info("disconnected with result code: {}".format(rc)) + with self._on_disconnected_lock: + if this.on_mqtt_disconnected_handler: + try: + this.on_mqtt_disconnected_handler(rc) + except Exception: + logger.error("Unexpected error calling on_mqtt_disconnected_handler") + logger.error(traceback.format_exc()) + + else: + logger.warning("No event handler callback set for on_mqtt_disconnected_handler") + + def on_subscribe(client, userdata, mid, granted_qos): + logger.debug("suback received for {}".format(mid)) + + def on_unsubscribe(client, userdata, mid): + logger.debug("UNSUBACK received for {}".format(mid)) + + def on_publish(client, userdata, mid): + logger.debug("payload published for {}".format(mid)) + + def _find_uploadRule_config(uploadRules, ctrlName, measureName): + for 
uploadRule in uploadRules: + if uploadRule["measureName"] == measureName and uploadRule["ctrlName"] == ctrlName: + return uploadRule + + def _convert_write_plc_response(data, cloudName): + response_data = list() + for controller in data["controllers"]: + for measure in controller["measures"]: + if cloudName: + uploadRule = _find_uploadRule_config(AENTE_SERVICE_INFO_MAPS[cloudName]["uploadRules"], controller["name"], measure["name"]) + if uploadRule: + if len(uploadRule["uploadName"]): + measure["name"] = uploadRule["uploadName"] + result = "OK" if measure["error_code"] == 0 else "Failed" + var = dict({'value':measure["value"], 'device':controller["name"], 'var_name':measure["name"], 'result':result, + 'error':measure["error_reason"]}) + response_data.append(var) + + return response_data + + def _convert_recall2_response(data): + response = {'timestamp':(time.time)(), + 'values':{}} + for controller in data["controllers"]: + response["values"][controller["name"]] = dict() + for measure in controller["measures"]: + var = {'raw_data':measure["value"], + 'timestamp':measure["timestamp"], 'status':measure["health"]} + response["values"][controller["name"]][measure["name"]] = var + + return response + + def _build_async_response(event, payload): + if event.type == "write_plc_values": + if event.format == 3: + for controller in payload["controllers"]: + for measure in controller["measures"]: + if event.cloudName: + uploadRule = _find_uploadRule_config(AENTE_SERVICE_INFO_MAPS[event.cloudName]["uploadRules"], controller["name"], measure["name"]) + if uploadRule and len(uploadRule["uploadName"]): + measure["name"] = uploadRule["uploadName"] + + response_data = payload["controllers"] + else: + response_data = _convert_write_plc_response(payload, event.cloudName) + else: + if event.type == "recall2": + response_data = _convert_recall2_response(payload) + else: + if event.type == "GetName" or event.type == "Upload": + response_data = payload["output"] + else: + if event.type == 
"Download": + response_data = base64.b64decode(payload["output"]) + else: + response_data = payload + return response_data + + def _mqtt_topic_matches_sub(topics, sub_topic): + for topic in topics: + if mqtt.topic_matches_sub(topic, sub_topic): + return True + + return False + + def _get_establish_event_id(topic, payload): + event_id = "" + sub_topics = [ + InternalTopic.LWTSDB_INSERT_RESPONSE, InternalTopic.LWTSDB_QUERY_RESPONSE, + InternalTopic.LWTSDB_REMOVE_RESPONSE] + if _mqtt_topic_matches_sub(sub_topics, topic): + event_id = topic.split("/")[-1] + else: + if "task_id" in payload: + event_id = payload["task_id"] + return event_id + + def on_message(client, userdata, mqtt_message): + this = self_weakref() + logger.debug("Recived message(topic:%s, payload length:%s Bytes)" % ( + mqtt_message.topic, len(mqtt_message.payload))) + try: + if _mqtt_topic_matches_sub(this._establish_event_resp_topics, mqtt_message.topic): + payload = Utilities.json_to_obj(mqtt_message.payload) + event_id = _get_establish_event_id(mqtt_message.topic, payload) + with self.sync_establish_event_lock: + if event_id in this.sync_establish_event: + this.sync_establish_event[event_id].result = payload + return + with self.async_establish_event_lock: + if event_id in this.async_establish_event: + event = this.async_establish_event[event_id] + response_data = _build_async_response(event, payload) + if event.callback: + if self.thread_pool: + if event.callback.__code__.co_argcount == 3: + self.thread_pool.submit(event.callback, response_data, event.userdata, self.ds1) + else: + self.thread_pool.submit(event.callback, response_data, event.userdata) + logger.debug("%s Callback submit" % event.type) + else: + if event.callback.__code__.co_argcount == 3: + event.callback(response_data, event.userdata, self.ds1) + else: + event.callback(response_data, event.userdata) + if self.scheduler: + self.scheduler.remove_job(event_id) + del this.async_establish_event[event_id] + return + logger.warning("No 
event handler response(topic:%s, payload:%s)" % (mqtt_message.topic, payload)) + return + except Exception: + logger.error("Unexpected error calling sync_establish_event") + logger.error(traceback.format_exc()) + return + else: + with self._on_message_lock: + if this.on_mqtt_message_received_handler: + try: + this.on_mqtt_message_received_handler(mqtt_message.topic, mqtt_message.payload) + except Exception: + logger.error("Unexpected error calling on_mqtt_message_received_handler") + logger.error(traceback.format_exc()) + + else: + logger.debug("No event handler callback set.") + + mqtt_client.on_connect = on_connect + mqtt_client.on_disconnect = on_disconnect + mqtt_client.on_subscribe = on_subscribe + mqtt_client.on_unsubscribe = on_unsubscribe + mqtt_client.on_publish = on_publish + mqtt_client.on_message = on_message + mqtt_client.reconnect_delay_set(1, 360) + logger.debug("Created MQTT protocol client, assigned callbacks") + return mqtt_client + + def _cleanup_transport_on_error(self): + logger.info("Forcing paho disconnect to prevent it from automatically reconnecting") + self._mqtt_client.disconnect() + self._mqtt_client.loop_stop() + if threading.current_thread() == self._mqtt_client._thread: + logger.debug("in paho thread. nulling _thread") + self._mqtt_client._thread = None + logger.debug("Done forcing paho disconnect") + + def connect(self): + """ + Connect to the MQTT broker. + """ + logger.info("connecting to mqtt broker") + try: + port = get_port() + logger.info("Connect using port {} (TCP)".format(port)) + if os.path.exists(EC_SYSTEM_INFO): + self._mqtt_client.username_pw_set("admin", "admin") + rc = self._mqtt_client.connect(host="127.0.0.1", port=port) + except Exception as e: + try: + self._cleanup_transport_on_error() + logger.error("Unexpected Paho failure during connect. 
%s" % e) + finally: + e = None + del e + + logger.debug("_mqtt_client.connect returned rc={}".format(rc)) + self._mqtt_client.loop_start() + + def disconnect(self): + """ + Disconnect from the MQTT broker. + """ + logger.info("disconnecting MQTT client") + try: + try: + rc = self._mqtt_client.disconnect() + except Exception as e: + try: + logger.error("Unexpected Paho failure during disconnect. %s" % e) + finally: + e = None + del e + + finally: + self._mqtt_client.loop_stop() + if threading.current_thread() == self._mqtt_client._thread: + logger.debug("in paho thread. nulling _thread") + self._mqtt_client._thread = None + + logger.debug("_mqtt_client.disconnect returned rc={}".format(rc)) + + def subscribe(self, topic, qos=0, callback=None): + """ + This method subscribes the client to one topic from the MQTT broker. + """ + rc, mid = (None, None) + logger.info("subscribing to {} with qos {}".format(topic, qos)) + try: + rc, mid = self._mqtt_client.subscribe(topic, qos=qos) + except Exception as e: + try: + logger.error("Unexpected Paho failure during subscribe. %s" % e) + finally: + e = None + del e + + logger.debug("_mqtt_client.subscribe returned rc={} mid={}".format(rc, mid)) + + def unsubscribe(self, topic, callback=None): + """ + Unsubscribe the client from one topic on the MQTT broker. + """ + rc, mid = (None, None) + logger.info("unsubscribing from {}".format(topic)) + try: + rc, mid = self._mqtt_client.unsubscribe(topic) + except Exception as e: + try: + logger.error("Unexpected Paho failure during unsubscribe. %s" % e) + finally: + e = None + del e + + logger.debug("_mqtt_client.unsubscribe returned rc={} mid={}".format(rc, mid)) + + def publish(self, topic, payload, qos=0, callback=None): + """ + Send a message via the MQTT broker. 
+ """ + with self._mqtt_publish_lock: + logger.debug("### current thread name: %s ###" % threading.current_thread().name) + logger.debug("Publish message(topic:%s, payload length:%s Bytes)" % (topic, len(payload))) + rc, mid = (None, None) + try: + rc, mid = self._mqtt_client.publish(topic=topic, payload=payload, qos=qos) + except Exception as e: + try: + logger.error("Unexpected Paho failure during publish. %s" % e) + return + finally: + e = None + del e + + logger.debug("_mqtt_client.publish returned rc={} mid={}".format(rc, mid)) + + def _build_async_timeout_response(self, async_event): + response = ('error', -110, 'timeout') + if async_event.type in ('insert_request', 'remove_request'): + response = {'return_code':-4, + 'return_msg':"Request timeout"} + else: + if async_event.type in ('query_request', ): + response = {'total':0, + 'offset':0, 'size':0, 'data':[]} + return response + + def _handle_event_timeout(self, event_id): + with self.async_establish_event_lock: + if event_id in self.async_establish_event: + event = self.async_establish_event[event_id] + logger.warn("Event(%s) have timeout" % event.type) + if event.callback: + response_data = self._build_async_timeout_response(event) + if self.thread_pool: + if event.callback.__code__.co_argcount == 3: + self.thread_pool.submit(event.callback, response_data, event.userdata, self.ds1) + else: + self.thread_pool.submit(event.callback, response_data, event.userdata) + logger.info("%s Callback submit" % event.type) + else: + if event.callback.__code__.co_argcount == 3: + event.callback(response_data, event.userdata, self.ds1) + else: + event.callback(response_data, event.userdata) + if self.scheduler: + self.scheduler.remove_job(event_id) + del self.async_establish_event[event_id] + + def async_publish_message(self, topic, payload, qos=0, event_id=None, timeout=60, resp_topic=None, userargs=dict()): + if not event_id: + event_id = str(uuid.uuid1()) + if not resp_topic: + resp_topic = topic + "/response" + with 
self.async_establish_event_lock: + if resp_topic not in self._establish_event_resp_topics: + self.subscribe(topic=resp_topic, qos=0) + self._establish_event_resp_topics.append(resp_topic) + self.async_establish_event[event_id] = EstablishEvent(event_id, resp_topic) + self.async_establish_event[event_id].type = userargs["type"] + if "format" in userargs: + self.async_establish_event[event_id].format = userargs["format"] + self.async_establish_event[event_id].callback = userargs["callback"] + self.async_establish_event[event_id].userdata = userargs["userdata"] + if "cloudName" in userargs: + self.async_establish_event[event_id].cloudName = userargs["cloudName"] + if self.scheduler: + self.scheduler.add_job((self._handle_event_timeout), + "interval", args=event_id, seconds=timeout, + id=event_id, + replace_existing=True) + self.publish(topic, payload, qos) + + def sync_publish_message(self, topic, payload, qos=0, event_id=None, timeout=60, resp_topic=None): + """ + A synchronization publish to the message handler. + """ + if not event_id: + event_id = str(uuid.uuid1()) + if not resp_topic: + resp_topic = topic + "/response" + with self.sync_establish_event_lock: + if resp_topic not in self._establish_event_resp_topics: + self.subscribe(topic=resp_topic, qos=0) + self._establish_event_resp_topics.append(resp_topic) + self.sync_establish_event[event_id] = EstablishEvent(event_id, resp_topic) + self.publish(topic, payload, qos) + count = 0 + logger.debug("current thread name: %s timeout: %s" % (threading.current_thread().name, timeout)) + while True: + if count > timeout * 10: + logger.warning("Request timeout! task id({})".format(event_id)) + break + time.sleep(0.1) + result = self.get_sync_establish_event(event_id) + if result: + self.del_sync_establish_event(event_id) + return result + count = count + 1 + + self.del_sync_establish_event(event_id) + + def get_sync_establish_event(self, event_id): + """ + Get synchronization event response. 
+ """ + with self.sync_establish_event_lock: + if event_id in self.sync_establish_event: + return self.sync_establish_event[event_id].result + + def del_sync_establish_event(self, event_id): + """ + Delete a synchronization event to the message received handler. + """ + with self.sync_establish_event_lock: + if event_id in self.sync_establish_event: + logger.debug("Delete task. id {}".format(event_id)) + del self.sync_establish_event[event_id] + + +class EstablishEvent: + + def __init__(self, event_id, event_topic, timeout=60): + self.event_id = event_id + self.event_topic = event_topic + self.timeout = timeout + self.result = None + self.type = None + self.format = None + self.callback = None + self.userdata = None + self.cloudName = None + + def set_result(self, result): + self.result = result + + +class OperationManager(object): + __doc__ = "\n Tracks pending operations and thier associated callbacks until completion.\n " + + def __init__(self): + self._pending_operation_callbacks = {} + self._unknown_operation_completions = {} + self._lock = threading.Lock() + + def establish_operation(self, mid, callback=None): + """Establish a pending operation identified by MID, and store its completion callback. + + If the operation has already been completed, the callback will be triggered. 
+ """ + trigger_callback = False + with self._lock: + if mid in self._unknown_operation_completions: + del self._unknown_operation_completions[mid] + trigger_callback = True + else: + self._pending_operation_callbacks[mid] = callback + logger.debug("Waiting for response on MID: {}".format(mid)) + if trigger_callback: + logger.debug("Response for MID: {} was received early - triggering callback".format(mid)) + if callback: + try: + callback() + except Exception: + logger.error("Unexpected error calling callback for MID: {}".format(mid)) + logger.error(traceback.format_exc()) + + else: + logger.exception("No callback for MID: {}".format(mid)) + + def complete_operation(self, mid): + """ + Complete an operation identified by MID and trigger the associated completion callback. + """ + callback = None + trigger_callback = False + with self._lock: + if mid in self._pending_operation_callbacks: + callback = self._pending_operation_callbacks[mid] + del self._pending_operation_callbacks[mid] + trigger_callback = True + else: + logger.warning("Response received for unknown MID: {}".format(mid)) + self._unknown_operation_completions[mid] = mid + if trigger_callback: + logger.debug("Response received for recognized MID: {} - triggering callback".format(mid)) + if callback: + try: + callback() + except Exception: + logger.error("Unexpected error calling callback for MID: {}".format(mid)) + logger.error(traceback.format_exc()) + + else: + logger.debug("No callback set for MID: {}".format(mid)) + + +mqtt_transport = MQTTTransport() +if __name__ == "__main__": + mt = MQTTTransport() diff --git a/Pub_Sub/ba_facility/thingsboard/pub/sendData.py b/Pub_Sub/ba_facility/thingsboard/pub/sendData.py new file mode 100644 index 0000000..f956b1f --- /dev/null +++ b/Pub_Sub/ba_facility/thingsboard/pub/sendData.py @@ -0,0 +1,145 @@ +# Enter your python code. 
# Enter your python code.
import json, os, time
from datetime import datetime as dt
from common.Logger import logger
from quickfaas.remotebus import publish
from quickfaas.global_dict import get as get_params
from quickfaas.global_dict import _set_global_args


def reboot():
    """Restart the device supervisor by signalling supervisord (SIGHUP)."""
    logger.info("!" * 10 + "REBOOTING DEVICE" + "!" * 10)
    r = os.popen("kill -s SIGHUP `cat /var/run/python/supervisord.pid`").read()
    logger.info(f"REBOOT : {r}")


def checkFileExist(filename):
    """Ensure /var/user/files/<filename> exists, creating it (and the folder)
    with an empty JSON object when missing."""
    path = "/var/user/files"
    if not os.path.exists(path):
        logger.info("no folder making files folder in var/user")
        os.makedirs(path)
        with open(path + "/" + filename, "a") as f:
            json.dump({}, f)
    if not os.path.exists(path + "/" + filename):
        logger.info("no creds file making creds file")
        with open(path + "/" + filename, "a") as f:
            json.dump({}, f)


def convertDStoJSON(ds):
    """Convert a device-supervisor list of {"key":..., "value":...} entries
    into a plain {key: value} dict."""
    j = dict()
    for x in ds:
        j[x["key"]] = x["value"]
    return j


def convertJSONtoDS(j):
    """Convert a {key: value} dict back into the device-supervisor list form
    [{"key":..., "value":...}, ...]."""
    d = []
    for key in j.keys():
        d.append({"key": key, "value": j[key]})
    return d


def checkCredentialConfig():
    """Keep the cloud credentials in device_supervisor.cfg and the stored
    creds.json file in sync.

    If the cfg holds placeholder ("unknown"/empty) credentials, restore them
    from creds.json and reboot; otherwise persist any manually-entered
    credentials back to creds.json.
    """
    logger.info("CHECKING CONFIG")
    cfgpath = "/var/user/cfg/device_supervisor/device_supervisor.cfg"
    credspath = "/var/user/files/creds.json"
    cfg = dict()
    with open(cfgpath, "r") as f:
        cfg = json.load(f)
    clouds = cfg.get("clouds")
    logger.info(clouds)
    # If not configured then try to configure from stored values.
    if clouds[0]["args"]["clientId"] == "unknown" or clouds[0]["args"]["username"] == "unknown" or not clouds[0]["args"]["passwd"] or clouds[0]["args"]["passwd"] == "unknown":
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            creds = json.load(c)
        if creds:
            logger.info("updating config with stored data")
            clouds[0]["args"]["clientId"] = creds["clientId"]
            clouds[0]["args"]["username"] = creds["userName"]
            clouds[0]["args"]["passwd"] = creds["password"]
            cfg["clouds"] = clouds
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            # Reboot so the supervisor picks up the restored credentials.
            reboot()
    else:
        # cfg is filled out: if its data differs from storage, assume a manual
        # edit and persist it; if storage is empty, seed it from cfg.
        checkFileExist("creds.json")
        with open(credspath, "r") as c:
            logger.info("updating stored file with new data")
            cfg = checkParameterConfig(cfg)
            with open(cfgpath, "w", encoding='utf-8') as n:
                json.dump(cfg, n, indent=1, ensure_ascii=False)
            creds = json.load(c)
        if creds:
            if creds["clientId"] != clouds[0]["args"]["clientId"]:
                creds["clientId"] = clouds[0]["args"]["clientId"]
            if creds["userName"] != clouds[0]["args"]["username"]:
                creds["userName"] = clouds[0]["args"]["username"]
            if creds["password"] != clouds[0]["args"]["passwd"]:
                creds["password"] = clouds[0]["args"]["passwd"]
        else:
            creds["clientId"] = clouds[0]["args"]["clientId"]
            creds["userName"] = clouds[0]["args"]["username"]
            creds["password"] = clouds[0]["args"]["passwd"]
        with open(credspath, "w") as cw:
            json.dump(creds, cw)


def checkParameterConfig(cfg):
    """Reconcile the cfg's 'labels' parameters with the params.json store.

    Manually-edited cfg values (anything not "unknown") win over stored
    values; keys missing from storage are copied in. Returns the updated cfg.
    """
    logger.info("Checking Parameters!!!!")
    paramspath = "/var/user/files/params.json"
    cfgparams = convertDStoJSON(cfg.get("labels"))
    # Check stored values.
    checkFileExist("params.json")
    with open(paramspath, "r") as f:
        logger.info("Opened param storage file")
        params = json.load(f)
    if params:
        if cfgparams != params:
            logger.info("equalizing params between cfg and stored")
            for key in cfgparams.keys():
                try:
                    if cfgparams[key] != params[key] and cfgparams[key] != "unknown":
                        params[key] = cfgparams[key]
                except KeyError:
                    # BUG FIX: was a bare 'except:' (would swallow
                    # SystemExit/KeyboardInterrupt); only a missing stored
                    # key is expected here.
                    params[key] = cfgparams[key]
        cfg["labels"] = convertJSONtoDS(params)
        _set_global_args(convertJSONtoDS(params))
        with open(paramspath, "w") as p:
            json.dump(params, p)
    else:
        with open(paramspath, "w") as p:
            logger.info("initializing param file with params in memory")
            json.dump(convertDStoJSON(get_params()), p)
        cfg["labels"] = get_params()
    return cfg
# Helper function to split the payload into chunks
def chunk_payload(payload, chunk_size=20):
    """Yield telemetry dicts carrying at most *chunk_size* values each.

    Each chunk reuses the payload's "ts" and a slice of its "values",
    preserving insertion order. Yields nothing for an empty "values".
    """
    chunked_values = list(payload["values"].items())
    for i in range(0, len(chunked_values), chunk_size):
        yield {
            "ts": payload["ts"],
            "values": dict(chunked_values[i:i + chunk_size])
        }


def _report_ts():
    """Current time rounded to the nearest 10 minutes, in milliseconds."""
    return (round(dt.timestamp(dt.now()) / 600) * 600) * 1000


def sendData(message):
    """Publish every measure in *message* as ThingsBoard telemetry.

    message["measures"] is a list of {"name":..., "value":...} entries.
    ``__topic__`` and ``__qos__`` are injected by the quickfaas runtime
    — presumably the configured cloud topic/QoS; TODO confirm.
    """
    payload = {}
    # Timestamp rounded to 10-minute buckets so repeated uploads align.
    payload["ts"] = _report_ts()
    payload["values"] = {}
    try:
        checkCredentialConfig()
    except Exception as e:
        # Best-effort: credential sync failure must not block telemetry.
        logger.error(e)
    for measure in message["measures"]:
        try:
            logger.debug(measure)
            payload["values"][measure["name"]] = measure["value"]
        except Exception as e:
            logger.error(e)
    for chunk in chunk_payload(payload=payload):
        publish(__topic__, json.dumps(chunk), __qos__)
        time.sleep(2)
    publish("v1/devices/me/attributes", json.dumps({"latestReportTime": _report_ts()}), __qos__)
def sync():
    """Read all current measures via recall() and publish them as telemetry
    in chunks on v1/devices/me/telemetry."""
    payload = {"ts": round(dt.timestamp(dt.now())) * 1000, "values": {}}
    topic = "v1/devices/me/telemetry"
    try:
        data = recall()
    except Exception as e:
        logger.error(e)
        # BUG FIX: 'data' was unbound after a failed recall(), so the
        # code below raised NameError; bail out instead.
        return
    logger.debug(data)
    for controller in data:
        for measure in controller["measures"]:
            payload["values"][measure["name"]] = measure["value"]
    logger.debug("Sending on topic: {}".format(topic))
    logger.debug("Sending value: {}".format(payload))
    for chunk in chunk_payload(payload=payload):
        publish(topic, json.dumps(chunk), 1)
        time.sleep(2)


def writeplctag(value):
    """Write one measure back to the PLC.

    *value* is expected to look like {"measurement": <name>, "value": <v>}
    — presumably supplied by the ThingsBoard RPC caller; TODO confirm.
    Returns True on success, False on any failure.
    """
    try:
        logger.info(value)
        message = [{"name": "ba_facility", "measures": [{"name": value["measurement"], "value": value["value"]}]}]
        resp = write(message)
        logger.info("RETURN FROM WRITE: {}".format(resp))
        return True
    except Exception as e:
        logger.error(e)
        return False


def receiveCommand(topic, payload, wizard_api):
    """Handle an incoming ThingsBoard RPC request.

    Only "setPLCTag" is acted on: the tag is written and, on success, a
    fresh telemetry sync is published. Every request is acknowledged with
    its request id (last topic segment).
    """
    try:
        logger.debug(topic)
        p = json.loads(payload)
        logger.info(p)
        command = p["method"]
        commands = {
            "sync": sync,
            "writeplctag": writeplctag,
        }
        if command == "setPLCTag":
            result = commands["writeplctag"](p["params"])
            if result:
                sync()
        ack(topic.split("/")[-1], wizard_api)
    except Exception as e:
        logger.error(e)


def ack(msgid, wizard_api):
    """Acknowledge an RPC request on its per-request response topic."""
    wizard_api.mqtt_publish("v1/devices/me/rpc/response/" + str(msgid), json.dumps({"msg": {"time": time.time()}, "metadata": "", "msgType": ""}))
+MeasuringPointName,ControllerName,GroupName,UploadType,DataType,EnableBit,BitIndex,reverseBit,Address,Decimal,Len,ReadWrite,Unit,Description,Transform Type,MaxValue,MinValue,MaxScale,MinScale,Gain,Offset,startBit,endBit,Pt,Ct,Mapping_table,TransDecimal,bitMap,msecSample,DataEndianReverse,ReadOffset,ReadLength,DataParseMethod,BitId,storageLwTSDB +air_compressor_discharge_lo_spt,ba_facility,default,periodic,FLOAT,,,,SPT_Air_Comp_Disch_Lo,2,,rw,,,none,,,,,,,,,,,,,,,,,,,,1 +air_compressor_discharge_psi,ba_facility,default,periodic,FLOAT,,,,Val_Air_Comp_Disch_PSI,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +air_compressor_lo_discharge_alm,ba_facility,default,periodic,BIT,,,0,AL0_Air_Compressor_Lo_Discharge_PSI,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1 +gas_flare_differential_pressure,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Flare_DP,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_flare_flow_rate,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Flare_FR,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_flare_lifetime,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Flare_Lifetime,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_flare_static_pressure,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Flare_SP,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_flare_temp,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Flare_Temp,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_flare_today,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Flare_Today,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_flare_yesterday,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Flare_Yesterday,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_sales_differential_pressure,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Sales_DP,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_sales_flow_rate,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Sales_FR,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_sales_lifetime,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Sales_Lifetime,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_sales_static_pressure,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Sales_SP,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 
+gas_sales_temp,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Sales_Temp,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_sales_today,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Sales_Today,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_sales_yesterday,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Sales_Yesterday,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_vessel_1_differential_pressure,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Vessel_1_DP,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_vessel_1_flow_rate,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Vessel_1_FR,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_vessel_1_lifetime,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Vessel_1_Lifetime,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_vessel_1_static_pressure,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Vessel_1_SP,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_vessel_1_temp,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Vessel_1_Temp,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_vessel_1_today,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Vessel_1_Today,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_vessel_1_yesterday,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Vessel_1_Yesterday,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_vessel_2_flow_rate,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Vessel_2_FR,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_vessel_2_lifetime,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Vessel_2_Lifetime,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_vessel_2_static_pressure,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Vessel_2_SP,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_vessel_2_temp,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Vessel_2_Temp,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_vessel_2_today,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Vessel_2_Today,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_vessel_2_yesterday,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Vessel_2_Yesterday,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_vessel_3_flow_rate,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Vessel_3_FR,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 
+gas_vessel_3_lifetime,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Vessel_3_Lifetime,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_vessel_3_static_pressure,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Vessel_3_SP,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_vessel_3_temp,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Vessel_3_Temp,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_vessel_3_today,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Vessel_3_Today,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +gas_vessel_3_yesterday,ba_facility,default,periodic,FLOAT,,,,Val_Gas_Vessel_3_Yesterday,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +lact_bsw,ba_facility,default,periodic,FLOAT,,,,Val_Lact_Meter_BSW,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +lact_density,ba_facility,default,periodic,FLOAT,,,,Val_Lact_Meter_Density,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +lact_faulted_alm,ba_facility,default,periodic,BIT,,,0,AL0_Lact_Unit_Faulted,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1 +lact_flow_rate,ba_facility,default,periodic,FLOAT,,,,Val_Lact_Meter_FR,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +lact_lifetime,ba_facility,default,periodic,FLOAT,,,,Val_Lact_Meter_Lifetime,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +lact_run_cmd,ba_facility,default,periodic,BIT,,,0,CMD_Lact_Run,,,rw,,,none,,,,,,,,,,,,,0,,,,,,,1 +lact_tank_level,ba_facility,default,periodic,FLOAT,,,,Val_Lact_Tank_Level_Scaled,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +lact_temp,ba_facility,default,periodic,FLOAT,,,,Val_Lact_Meter_Temp,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +lact_todays_total,ba_facility,default,periodic,FLOAT,,,,Val_Lact_Meter_Todays,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +lact_yesterday_total,ba_facility,default,periodic,FLOAT,,,,Val_Lact_Meter_Yest,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +light_cmd,ba_facility,default,periodic,BIT,,,0,CMD_Lights,,,rw,,,none,,,,,,,,,,,,,0,,,,,,,1 +oil_tank_selector,ba_facility,default,periodic,DINT,0,,,Oil_Tank_Selector,,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 
+oil_working_tank_level_cmd,ba_facility,default,periodic,FLOAT,,,,CMD_Oil_Working_Tank_Level,2,,rw,,,none,,,,,,,,,,,,,,,,,,,,1 +ot_1_hi_alm,ba_facility,default,periodic,BIT,,,0,AL0_OT1_High_Level,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1 +ot_1_pv,ba_facility,default,periodic,FLOAT,,,,Val_OT1_PV,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +ot_1_sv,ba_facility,default,periodic,FLOAT,,,,Val_OT1_SV,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +ot_2_hi_alm,ba_facility,default,periodic,BIT,,,0,AL0_OT2_High_Level,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1 +ot_2_pv,ba_facility,default,periodic,FLOAT,,,,Val_OT2_PV,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +ot_2_sv,ba_facility,default,periodic,FLOAT,,,,Val_OT2_SV,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +power_supply_fault_alm,ba_facility,default,periodic,BIT,,,0,AL0_Power_Supply_Failure,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1 +power_supply_status,ba_facility,default,periodic,BIT,,,0,Raw_Power_Supply_Status,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1 +st_1_hi_alm,ba_facility,default,periodic,BIT,,,0,AL0_ST1_High_Level,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1 +st_1_pv,ba_facility,default,periodic,FLOAT,,,,Val_ST1_PV,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +st_1_sv,ba_facility,default,periodic,FLOAT,,,,Val_ST1_SV,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +sts_either_wtp_run,ba_facility,default,periodic,BIT,,,0,Sts_Either_WTP_Run,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1 +ups_fault_alm,ba_facility,default,periodic,BIT,,,0,AL0_UPS_Battery_Fault,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1 +ups_status,ba_facility,default,periodic,BIT,,,0,Raw_UPS_Status,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1 +vessel_1_oil_flow_rate,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_1_Oil_FR,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_1_oil_last_month_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_1_Oil_LastMonth_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_1_oil_month_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_1_Oil_Month_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 
+vessel_1_oil_t1,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_1_Oil_T1,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_1_oil_todays_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_1_Oil_Todays_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_1_oil_yesterday_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_1_Oil_Yest_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_1_water_flow_rate,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_1_Water_FR,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_1_water_last_month_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_1_Water_LastMonth_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_1_water_month_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_1_Water_Month_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_1_water_t1,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_1_Water_T1,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_1_water_todays_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_1_Water_Todays_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_1_water_yesterday_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_1_Water_Yest_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_2_oil_flow_rate,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_2_Oil_FR,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_2_oil_last_month_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_2_Oil_LastMonth_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_2_oil_month_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_2_Oil_Month_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_2_oil_t1,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_2_Oil_T1,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_2_oil_todays_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_2_Oil_Todays_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_2_oil_yesterday_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_2_Oil_Yest_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 
+vessel_2_water_flow_rate,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_2_Water_FR,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_2_water_last_month_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_2_Water_LastMonth_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_2_water_month_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_2_Water_Month_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_2_water_t1,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_2_Water_T1,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_2_water_todays_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_2_Water_Todays_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_2_water_yesterday_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_2_Water_Yest_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_3_oil_flow_rate,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_3_Oil_FR,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_3_oil_last_month_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_3_Oil_LastMonth_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_3_oil_month_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_3_Oil_Month_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_3_oil_t1,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_3_Oil_T1,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_3_oil_todays_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_3_Oil_Todays_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_3_oil_yesterday_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_3_Oil_Yest_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_3_water_flow_rate,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_3_Water_FR,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_3_water_last_month_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_3_Water_LastMonth_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_3_water_month_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_3_Water_Month_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 
+vessel_3_water_t1,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_3_Water_T1,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_3_water_todays_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_3_Water_Todays_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +vessel_3_water_yesterday_total,ba_facility,default,periodic,FLOAT,,,,Val_Vessel_3_Water_Yest_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +water_tank_selector,ba_facility,default,periodic,DINT,0,,,Water_Tank_Selector,,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +working_oil_tank_level,ba_facility,default,periodic,FLOAT,,,,Working_Oil_Tank_Level,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +working_water_tank_level,ba_facility,default,periodic,FLOAT,,,,Working_Water_Tank_Level,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +wt_1_hi_alm,ba_facility,default,periodic,BIT,,,0,AL0_WT1_High_Level,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1 +wt_1_pv,ba_facility,default,periodic,FLOAT,,,,Val_WT1_PV,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +wt_1_sv,ba_facility,default,periodic,FLOAT,,,,Val_WT1_SV,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +wt_2_hi_alm,ba_facility,default,periodic,BIT,,,0,AL0_WT2_High_Level,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1 +wt_2_pv,ba_facility,default,periodic,FLOAT,,,,Val_WT2_PV,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +wt_2_sv,ba_facility,default,periodic,FLOAT,,,,Val_WT2_SV,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +wtp_1_alm,ba_facility,default,periodic,DINT,0,,,WTP1_Alarm,,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +wtp_1_control_valve_position_cmd,ba_facility,default,periodic,FLOAT,,,,CMD_WTP1_Control_Valve_POS,2,,rw,,,none,,,,,,,,,,,,,,,,,,,,1 +wtp_1_discharge_psi,ba_facility,default,periodic,FLOAT,,,,Val_WTP1_Discharge_PSI_Scaled,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +wtp_1_flow_rate,ba_facility,default,periodic,FLOAT,,,,Val_WTP1_FR,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +wtp_1_hi_discharge_spt,ba_facility,default,periodic,FLOAT,,,,SPT_WTP1_Hi_Discharge,2,,rw,,,none,,,,,,,,,,,,,,,,,,,,1 +wtp_1_last_month_total,ba_facility,default,periodic,FLOAT,,,,Val_WTP1_LastMonth_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 
+wtp_1_monthly_total,ba_facility,default,periodic,FLOAT,,,,Val_WTP1_Monthly_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +wtp_1_run_cmd,ba_facility,default,periodic,BIT,,,0,WTP1_Run_CMD,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1 +wtp_1_ss_clear_fault_cmd,ba_facility,default,periodic,BIT,,,0,CMD_WTP1_SS_ClearFault,,,rw,,,none,,,,,,,,,,,,,0,,,,,,,1 +wtp_1_ss_fault_alm,ba_facility,default,periodic,BIT,,,0,AL0_WTP1_SS_Faulted,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1 +wtp_1_ss_run_cmd,ba_facility,default,periodic,BIT,,,0,CMD_WTP1_SS_Run,,,rw,,,none,,,,,,,,,,,,,0,,,,,,,1 +wtp_1_ss_running,ba_facility,default,periodic,BIT,,,0,FBK_WTP1_SS_Running,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1 +wtp_1_start_pb_cmd,ba_facility,default,periodic,BIT,,,0,CMD_WTP1_Start_PB,,,rw,,,none,,,,,,,,,,,,,0,,,,,,,1 +wtp_1_start_spt,ba_facility,default,periodic,FLOAT,,,,SPT_WTP1_Start,2,,rw,,,none,,,,,,,,,,,,,,,,,,,,1 +wtp_1_stop_pb_cmd,ba_facility,default,periodic,BIT,,,0,CMD_WTP1_Stop_PB,,,rw,,,none,,,,,,,,,,,,,0,,,,,,,1 +wtp_1_stop_spt,ba_facility,default,periodic,FLOAT,,,,SPT_WTP1_Stop,2,,rw,,,none,,,,,,,,,,,,,,,,,,,,1 +wtp_1_t1,ba_facility,default,periodic,FLOAT,,,,Val_WTP1_T1,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +wtp_1_todays_total,ba_facility,default,periodic,FLOAT,,,,Val_WTP1_Todays_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +wtp_1_yesterday_total,ba_facility,default,periodic,FLOAT,,,,Val_WTP1_Yest_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +wtp_2_alm,ba_facility,default,periodic,DINT,0,,,WTP2_Alarm,,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +wtp_2_control_valve_position_cmd,ba_facility,default,periodic,FLOAT,,,,CMD_WTP2_Control_Valve_POS,2,,rw,,,none,,,,,,,,,,,,,,,,,,,,1 +wtp_2_discharge_psi,ba_facility,default,periodic,FLOAT,,,,Val_WTP2_Discharge_PSI_Scaled,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +wtp_2_flow_rate,ba_facility,default,periodic,FLOAT,,,,Val_WTP2_FR,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +wtp_2_hi_discharge_spt,ba_facility,default,periodic,FLOAT,,,,SPT_WTP2_Hi_Discharge,2,,rw,,,none,,,,,,,,,,,,,,,,,,,,1 
+wtp_2_last_month_total,ba_facility,default,periodic,FLOAT,,,,Val_WTP2_LastMonth_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +wtp_2_monthly_total,ba_facility,default,periodic,FLOAT,,,,Val_WTP2_Monthly_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +wtp_2_run_cmd,ba_facility,default,periodic,BIT,,,0,WTP2_Run_CMD,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1 +wtp_2_ss_clear_fault_cmd,ba_facility,default,periodic,BIT,,,0,CMD_WTP2_SS_ClearFault,,,rw,,,none,,,,,,,,,,,,,0,,,,,,,1 +wtp_2_ss_fault_alm,ba_facility,default,periodic,BIT,,,0,AL0_WTP2_SS_Faulted,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1 +wtp_2_ss_run_cmd,ba_facility,default,periodic,BIT,,,0,CMD_WTP2_SS_Run,,,rw,,,none,,,,,,,,,,,,,0,,,,,,,1 +wtp_2_ss_running,ba_facility,default,periodic,BIT,,,0,FBK_WTP2_SS_Running,,,ro,,,none,,,,,,,,,,,,,0,,,,,,,1 +wtp_2_start_pb_cmd,ba_facility,default,periodic,BIT,,,0,CMD_WTP2_Start_PB,,,rw,,,none,,,,,,,,,,,,,0,,,,,,,1 +wtp_2_start_spt,ba_facility,default,periodic,FLOAT,,,,SPT_WTP2_Start,2,,rw,,,none,,,,,,,,,,,,,,,,,,,,1 +wtp_2_stop_pb_cmd,ba_facility,default,periodic,BIT,,,0,CMD_WTP2_Stop_PB,,,rw,,,none,,,,,,,,,,,,,0,,,,,,,1 +wtp_2_stop_spt,ba_facility,default,periodic,FLOAT,,,,SPT_WTP2_Stop,2,,rw,,,none,,,,,,,,,,,,,,,,,,,,1 +wtp_2_t1,ba_facility,default,periodic,FLOAT,,,,Val_WTP2_T1,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +wtp_2_todays_total,ba_facility,default,periodic,FLOAT,,,,Val_WTP2_Todays_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 +wtp_2_yesterday_total,ba_facility,default,periodic,FLOAT,,,,Val_WTP2_Yest_Total,2,,ro,,,none,,,,,,,,,,,,,,,,,,,,1 \ No newline at end of file diff --git a/Pub_Sub/cameratrailer_mb/thingsboard/cameratrailer_tb_v6.cfg b/Pub_Sub/cameratrailer_mb/thingsboard/cameratrailer_tb_v6.cfg index c6db036..adbe365 100644 --- a/Pub_Sub/cameratrailer_mb/thingsboard/cameratrailer_tb_v6.cfg +++ b/Pub_Sub/cameratrailer_mb/thingsboard/cameratrailer_tb_v6.cfg @@ -115,7 +115,7 @@ "groups": [ { "name": "default", - "uploadInterval": 10, + "uploadInterval": 600, "enablePerOnchange": 0, "LwTSDBSize": 
1000, "strategy": 1, diff --git a/Pub_Sub/rr_facility/thingsboard/alarm.csv b/Pub_Sub/rr_facility/thingsboard/alarm.csv index 86c9cb7..44bdab4 100644 --- a/Pub_Sub/rr_facility/thingsboard/alarm.csv +++ b/Pub_Sub/rr_facility/thingsboard/alarm.csv @@ -12,6 +12,7 @@ tp_1_lo_discharge_alm,rr_facility,tp_1_lo_discharge_alm,5,eq,1,none,eq,,Failure, tp_1_lo_oil_alm,rr_facility,tp_1_lo_oil_alm,5,eq,1,none,eq,,Failure,default tp_1_lo_suction_alm,rr_facility,tp_1_lo_suction_alm,5,eq,1,none,eq,,Failure,default tp_1_oil_cooler_failed_to_start_alm,rr_facility,tp_1_oil_cooler_failed_to_start_alm,5,eq,1,none,eq,,Failure,default +tp_1_vfd_faulted_alm,rr_facility,tp_1_vfd_faulted_alm,5,eq,1,none,eq,,Failure,default tp_2_charge_pump_fail_to_start_alm,rr_facility,tp_2_charge_pump_fail_to_start_alm,5,eq,1,none,eq,,Failure,default tp_2_hi_a_winding_alm,rr_facility,tp_2_hi_a_winding_alm,5,eq,1,none,eq,,Failure,default tp_2_hi_b_winding_alm,rr_facility,tp_2_hi_b_winding_alm,5,eq,1,none,eq,,Failure,default @@ -24,6 +25,7 @@ tp_2_lo_discharge_alm,rr_facility,tp_2_lo_discharge_alm,5,eq,1,none,eq,,Failure, tp_2_lo_oil_alm,rr_facility,tp_2_lo_oil_alm,5,eq,1,none,eq,,Failure,default tp_2_lo_suction_alm,rr_facility,tp_2_lo_suction_alm,5,eq,1,none,eq,,Failure,default tp_2_oil_cooler_failed_to_start_alm,rr_facility,tp_2_oil_cooler_failed_to_start_alm,5,eq,1,none,eq,,Failure,default +tp_2_vfd_faulted_alm,rr_facility,tp_2_vfd_faulted_alm,5,eq,1,none,eq,,Failure,default ww_1_comms_alm,rr_facility,ww_1_comms_alm,5,eq,1,none,eq,,water well 1 comms failure,default ww_1_control_power_alm,rr_facility,ww_1_control_power_alm,5,eq,1,none,eq,,Failure,default ww_1_hi_discharge_alm,rr_facility,ww_1_hi_discharge_alm,5,eq,1,none,eq,,Failure,default diff --git a/Pub_Sub/rr_facility/thingsboard/rr_facility.csv b/Pub_Sub/rr_facility/thingsboard/rr_facility_measures.csv similarity index 99% rename from Pub_Sub/rr_facility/thingsboard/rr_facility.csv rename to Pub_Sub/rr_facility/thingsboard/rr_facility_measures.csv 
index 3653413..667af87 100644 --- a/Pub_Sub/rr_facility/thingsboard/rr_facility.csv +++ b/Pub_Sub/rr_facility/thingsboard/rr_facility_measures.csv @@ -1,385 +1,389 @@ MeasuringPointName,ControllerName,GroupName,UploadType,DeadZoneType,DeadZonePercent,DataType,ArrayIndex,EnableBit,BitIndex,reverseBit,Address,Decimal,Len,CodeType,ReadWrite,Unit,Description,Transform Type,MaxValue,MinValue,MaxScale,MinScale,Gain,Offset,startBit,endBit,Pt,Ct,Mapping_table,TransDecimal,bitMap,msecSample,storageLwTSDB,DataEndianReverse,ReadOffset,ReadLength,WriteOffset,WriteLength,DataParseMethod,BitId,pollCycle,EnableRequestCount,RequestCount -pond_level_input_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_Pond_Level_Input_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP_1_Discharge_Input_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_suction_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP_1_Suction_Input_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_thrust_chamber_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP_1_Thrust_Chamber_Temp_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_vibration_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP_1_Vibration_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP_2_Discharge_Input_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_suction_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP_2_Suction_Input_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_thrust_chamber_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP_2_Thrust_Chamber_Temp_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_vibration_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP_2_Vibration_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_charge_pump_fail_to_start_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_Charge_Pump_Failed_To_Start,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, 
-tp_1_hi_a_winding_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_High_A_Winding_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_hi_b_winding_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_High_B_Winding_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_hi_c_winding_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_High_C_Winding_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_hi_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_High_Discharge_PSI,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_hi_inboard_temp_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_High_Inboard_Temp_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_hi_outboard_temp_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_High_Outboard_Temp_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_hi_vibration_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_High_Vibration_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_lo_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_Low_Discharge_PSI,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_lo_oil_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_Low_Oil_Level_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_lo_suction_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_Low_Suction_Pressure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_oil_cooler_failed_to_start_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_Oil_Cooler_Failed_To_Start,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_vfd_faulted_alm,rr_facility,default,periodic,,,BIT,,,,0,Al0_TP1_VFD_Faulted,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_charge_pump_fail_to_start_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_Charge_Pump_Failed_To_Start,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_hi_a_winding_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_High_A_Winding_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, 
-tp_2_hi_b_winding_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_High_B_Winding_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_hi_c_winding_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_High_C_Winding_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_hi_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_High_Discharge_PSI,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_hi_inboard_temp_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_High_Inboard_Temp_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_hi_outboard_temp_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_High_Outboard_Temp_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_hi_vibration_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_High_Vibration_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_lo_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_Low_Discharge_PSI,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_lo_oil_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_Low_Oil_Level_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_lo_suction_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_Low_Suction_Pressure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_oil_cooler_failed_to_start_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_Oil_Cooler_Failed_To_Start,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_vfd_faulted_alm,rr_facility,default,periodic,,,BIT,,,,0,Al0_TP2_VFD_Faulted,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_1_comms_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_Water_Well_1_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_2_comms_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_Water_Well_2_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_3_comms_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_Water_Well_3_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, 
-ww_4_comms_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_Water_Well_4_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_5_comms_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_Water_Well_5_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_6_comms_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_Water_Well_6_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_1_control_power_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW1_Control_Power_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_1_hi_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW1_Hi_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_1_hi_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW1_Hi_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_1_hoa_in_manual_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW1_HOA_In_Manual,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_1_lo_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW1_Lo_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_1_lo_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW1_Lo_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_1_lo_pip_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW1_Lo_PIP_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_1_master_comm_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW1_Master_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_1_vfd_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW1_VFD_Faulted,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_2_control_power_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW2_Control_Power_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_2_hi_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW2_Hi_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_2_hi_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW2_Hi_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, 
-ww_2_hoa_in_manual_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW2_HOA_In_Manual,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_2_lo_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW2_Lo_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_2_lo_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW2_Lo_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_2_lo_pip_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW2_Lo_PIP_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_2_master_comm_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW2_Master_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_2_vfd_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW2_VFD_Faulted,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_3_control_power_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW3_Control_Power_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_3_hi_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW3_Hi_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_3_hi_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW3_Hi_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_3_hoa_in_manual_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW3_HOA_In_Manual,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_3_lo_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW3_Lo_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_3_lo_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW3_Lo_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_3_lo_pip_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW3_Lo_PIP_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_3_master_comm_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW3_Master_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_3_vfd_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW3_VFD_Faulted,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, 
-ww_4_control_power_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW4_Control_Power_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_4_hi_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW4_Hi_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_4_hi_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW4_Hi_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_4_hoa_in_manual_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW4_HOA_In_Manual,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_4_lo_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW4_Lo_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_4_lo_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW4_Lo_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_4_lo_pip_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW4_Lo_PIP_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_4_master_comm_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW4_Master_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_4_vfd_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW4_VFD_Faulted,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_5_control_power_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW5_Control_Power_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_5_hi_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW5_Hi_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_5_hi_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW5_Hi_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_5_hoa_in_manual_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW5_HOA_In_Manual,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_5_lo_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW5_Lo_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_5_lo_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW5_Lo_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, 
-ww_5_lo_pip_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW5_Lo_PIP_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_5_master_comm_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW5_Master_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_5_vfd_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW5_VFD_Faulted,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_6_control_power_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW6_Control_Power_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_6_hi_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW6_Hi_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_6_hi_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW6_Hi_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_6_hoa_in_manual_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW6_HOA_In_Manual,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_6_lo_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW6_Lo_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_6_lo_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW6_Lo_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_6_lo_pip_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW6_Lo_PIP_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_6_master_comm_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW6_Master_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_6_vfd_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW6_VFD_Faulted,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_vfd_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_Run_TP1_VFD,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_vfd_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_Run_TP2_VFD,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_hi_a_winding_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_High_A_Winding_Temp_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, 
-tp_1_hi_b_winding_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_High_B_Winding_Temp_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_hi_c_winding_bypass_alm,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_High_C_Winding_Temp_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_hi_discharge_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_High_Discharge_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_hi_inboard_temp_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_High_Inboard_Temp_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_hi_outboard_temp_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_High_Outboard_Temp_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_hi_vibration_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_High_Vibration_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_lo_discharge_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_Low_Discharge_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_master_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_Master_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_master_fault_clear_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_Master_Fault_Clear,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_oil_cooler_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_Oil_Cooler_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_oil_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_Oil_Level_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_pid_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_PID_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_hi_a_winding_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_High_A_Winding_Temp_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, 
-tp_2_hi_b_winding_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_High_B_Winding_Temp_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_hi_c_winding_bypass_alm,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_High_C_Winding_Temp_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_hi_discharge_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_High_Discharge_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_hi_inboard_temp_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_High_Inboard_Temp_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_hi_outboard_temp_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_High_Outboard_Temp_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_hi_vibration_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_High_Vibration_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_lo_discharge_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_Low_Discharge_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_master_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_Master_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_master_fault_clear_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_Master_Fault_Clear,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_oil_cooler_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_Oil_Cooler_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_oil_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_Oil_Level_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_pid_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_PID_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_all_start_stop_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW_Start_Stop_Together,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_1_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW1_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, 
-ww_1_comms_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW1_Communication_Check,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_1_hi_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW1_Enable_Hi_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_1_hi_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW1_Enable_Hi_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_1_lo_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW1_Enable_Lo_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_1_lo_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW1_Enable_Lo_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_1_lo_pip_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW1_Enable_Lo_PIP_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_1_manual_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW1_Manual_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_1_pid_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW1_PID_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_1_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW1_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_2_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW2_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_2_comms_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW2_Communication_Check,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_2_hi_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW2_Enable_Hi_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_2_hi_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW2_Enable_Hi_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_2_lo_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW2_Enable_Lo_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_2_lo_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW2_Enable_Lo_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, 
-ww_2_lo_pip_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW2_Enable_Lo_PIP_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_2_manual_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW2_Manual_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_2_pid_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW2_PID_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_2_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW2_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_3_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW3_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_3_comms_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW3_Communication_Check,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_3_hi_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW3_Enable_Hi_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_3_hi_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW3_Enable_Hi_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_3_lo_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW3_Enable_Lo_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_3_lo_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW3_Enable_Lo_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_3_lo_pip_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW3_Enable_Lo_PIP_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_3_manual_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW3_Manual_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_3_pid_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW3_PID_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_3_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW3_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_4_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW4_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_4_comms_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW4_Communication_Check,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, 
-ww_4_hi_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW4_Enable_Hi_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_4_hi_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW4_Enable_Hi_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_4_lo_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW4_Enable_Lo_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_4_lo_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW4_Enable_Lo_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_4_lo_pip_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW4_Enable_Lo_PIP_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_4_manual_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW4_Manual_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_4_pid_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW4_PID_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_4_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW4_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_5_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW5_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_5_comms_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW5_Communication_Check,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_5_hi_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW5_Enable_Hi_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_5_hi_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW5_Enable_Hi_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_5_lo_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW5_Enable_Lo_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_5_lo_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW5_Enable_Lo_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_5_lo_pip_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW5_Enable_Lo_PIP_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, 
-ww_5_manual_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW5_Manual_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_5_pid_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW5_PID_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_5_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW5_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_6_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW6_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_6_comms_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW6_Communication_Check,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_6_hi_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW6_Enable_Hi_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_6_hi_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW6_Enable_Hi_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_6_lo_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW6_Enable_Lo_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_6_lo_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW6_Enable_Lo_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_6_lo_pip_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW6_Enable_Lo_PIP_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_6_manual_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW6_Manual_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_6_pid_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW6_PID_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_6_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW6_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, current_day,rr_facility,default,periodic,,,INT,,0,,,Current_Day,,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, current_hour,rr_facility,default,periodic,,,INT,,0,,,Current_Hour,,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, current_minute,rr_facility,default,periodic,,,INT,,0,,,Current_Minute,,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
current_second,rr_facility,default,periodic,,,INT,,0,,,Current_Second,,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_1_charge_pump_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_TP1_Charge_Pump_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_lo_oil,rr_facility,default,periodic,,,BIT,,,,0,FBK_TP1_Low_Oil_Level,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_oil_cooler_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_TP1_Oil_Cooler_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_1_vfd_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_TP1_VFD_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_charge_pump_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_TP2_Charge_Pump_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_lo_oil,rr_facility,default,periodic,,,BIT,,,,0,FBK_TP2_Low_Oil_Level,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_oil_cooler_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_TP2_Oil_Cooler_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -tp_2_vfd_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_TP2_VFD_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_1_vfd_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_WW1_VFD_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_2_vfd_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_WW2_VFD_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_3_vfd_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_WW3_VFD_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_4_vfd_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_WW4_VFD_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_5_vfd_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_WW5_VFD_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -ww_6_vfd_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_WW6_VFD_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, -pond_level_start_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Start,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
-ww_1_pond_level_start_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Start_WW1,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_2_pond_level_start_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Start_WW2,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_3_pond_level_start_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Start_WW3,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_4_pond_level_start_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Start_WW4,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_5_pond_level_start_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Start_WW5,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_6_pond_level_start_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Start_WW6,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -pond_level_stop_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Stop,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_1_pond_level_stop_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Stop_WW1,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_2_pond_level_stop_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Stop_WW2,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_3_pond_level_stop_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Stop_WW3,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_4_pond_level_stop_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Stop_WW4,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_5_pond_level_stop_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Stop_WW5,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_6_pond_level_stop_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Stop_WW6,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -suction_permissive_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Suction_PSI_Permissive,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_1_hi_a_winding_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP1_A_Winding_High,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
-tp_1_hi_b_winding_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP1_B_Winding_High,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_1_hi_c_winding_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP1_C_Winding_High,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_1_hi_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP1_High_Discharge_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_1_hi_vibration_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP1_High_Vibration_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_1_hi_inboard_temp_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP1_Inboard_High,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_1_lo_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP1_Low_Discharge_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_1_hi_outboard_temp_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP1_Outboard_High,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_1_pid_manual_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP1_PID_Manual_Freq,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_1_pid_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP1_PID_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_2_hi_a_winding_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP2_A_Winding_High,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_2_hi_b_winding_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP2_B_Winding_High,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_2_hi_c_winding_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP2_C_Winding_High,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_2_hi_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP2_High_Discharge_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_2_hi_vibration_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP2_High_Vibration_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_2_hi_inboard_temp_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP2_Inboard_High,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
-tp_2_lo_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP2_Low_Discharge_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_2_hi_outboard_temp_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP2_Outboard_High,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_2_pid_manual_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP2_PID_Manual_Freq,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_2_pid_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP2_PID_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_1_hand_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW1_Hand_Freq,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_1_hi_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW1_Hi_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_1_hi_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW1_Hi_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_1_lo_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW1_Lo_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_1_lo_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW1_Lo_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_1_lo_pip_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW1_Lo_PIP_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_1_pid_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW1_PID_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_2_hand_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW2_Hand_Freq,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_2_hi_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW2_Hi_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_2_hi_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW2_Hi_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_2_lo_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW2_Lo_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
-ww_2_lo_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW2_Lo_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_2_lo_pip_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW2_Lo_PIP_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_2_pid_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW2_PID_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_3_hand_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW3_Hand_Freq,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_3_hi_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW3_Hi_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_3_hi_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW3_Hi_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_3_lo_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW3_Lo_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_3_lo_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW3_Lo_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_3_lo_pip_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW3_Lo_PIP_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_3_pid_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW3_PID_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_4_hand_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW4_Hand_Freq,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_4_hi_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW4_Hi_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_4_hi_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW4_Hi_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_4_lo_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW4_Lo_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_4_lo_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW4_Lo_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_4_lo_pip_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW4_Lo_PIP_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
-ww_4_pid_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW4_PID_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_5_hand_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW5_Hand_Freq,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_5_hi_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW5_Hi_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_5_hi_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW5_Hi_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_5_lo_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW5_Lo_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_5_lo_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW5_Lo_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_5_lo_pip_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW5_Lo_PIP_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_5_pid_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW5_PID_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_6_hand_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW6_Hand_Freq,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_6_hi_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW6_Hi_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_6_hi_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW6_Hi_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_6_lo_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW6_Lo_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_6_lo_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW6_Lo_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_6_lo_pip_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW6_Lo_PIP_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_6_pid_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW6_PID_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, pond_level,rr_facility,default,periodic,,,FLOAT,,,,,Val_Pond_Level_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
+pond_level_input_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_Pond_Level_Input_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +pond_level_start_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Start,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +pond_level_stop_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Stop,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +suction_permissive_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Suction_PSI_Permissive,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, tp_1_a_winding_temp,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_1_A_Winding_Temp_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, tp_1_b_winding_temp,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_1_B_Winding_Temp_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, tp_1_c_winding_temp,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_1_C_Winding_Temp_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_1_charge_pump_fail_to_start_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_Charge_Pump_Failed_To_Start,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_charge_pump_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_TP1_Charge_Pump_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, tp_1_discharge,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_1_Discharge_PSI_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_1_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP_1_Discharge_Input_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_flow,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP1_Flowrate,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_1_hi_a_winding_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_High_A_Winding_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_hi_a_winding_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_High_A_Winding_Temp_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_hi_a_winding_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP1_A_Winding_High,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
+tp_1_hi_b_winding_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_High_B_Winding_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_hi_b_winding_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_High_B_Winding_Temp_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_hi_b_winding_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP1_B_Winding_High,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_1_hi_c_winding_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_High_C_Winding_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_hi_c_winding_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_High_C_Winding_Temp_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_hi_c_winding_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP1_C_Winding_High,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_1_hi_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_High_Discharge_PSI,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_hi_discharge_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_High_Discharge_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_hi_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP1_High_Discharge_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_1_hi_inboard_temp_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_High_Inboard_Temp_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_hi_inboard_temp_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_High_Inboard_Temp_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_hi_inboard_temp_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP1_Inboard_High,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_1_hi_outboard_temp_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_High_Outboard_Temp_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_hi_outboard_temp_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_High_Outboard_Temp_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, 
+tp_1_hi_outboard_temp_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP1_Outboard_High,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_1_hi_vibration_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_High_Vibration_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_hi_vibration_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_High_Vibration_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_hi_vibration_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP1_High_Vibration_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, tp_1_inboard_temp,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_1_Inboard_Temp_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_1_lifetime,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP1_T1,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_1_lo_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_Low_Discharge_PSI,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_lo_discharge_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_Low_Discharge_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_lo_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP1_Low_Discharge_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_1_lo_oil,rr_facility,default,periodic,,,BIT,,,,0,FBK_TP1_Low_Oil_Level,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_lo_oil_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_Low_Oil_Level_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_lo_suction_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_Low_Suction_Pressure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_master_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_Master_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_master_fault_clear_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_Master_Fault_Clear,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_oil_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_Oil_Level_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, 
+tp_1_oil_cooler_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_Oil_Cooler_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_oil_cooler_failed_to_start_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP1_Oil_Cooler_Failed_To_Start,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_oil_cooler_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_TP1_Oil_Cooler_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, tp_1_outboard_temp,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_1_Outboard_Temp_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_1_pid_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP1_PID_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_pid_manual_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP1_PID_Manual_Freq,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_1_pid_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP1_PID_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, tp_1_suction,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_1_Suction_PSI_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_1_suction_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP_1_Suction_Input_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_thrust_chamber_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP_1_Thrust_Chamber_Temp_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, tp_1_thrust_chamber_temp,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_1_Thrust_Chamber_Temperature,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_1_vibration,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_1_Vibration_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_2_a_winding_temp,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_2_A_Winding_Temp_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_2_b_winding_temp,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_2_B_Winding_Temp_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_2_c_winding_temp,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_2_C_Winding_Temp_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
-tp_2_discharge,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_2_Discharge_PSI_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_2_inboard_temp,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_2_Inboard_Temp_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_2_outboard_temp,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_2_Outboard_Temp_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_2_suction,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_2_Suction_PSI_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_2_thrust_chamber_temp,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_2_Thrust_Chamber_Temperature,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_2_vibration,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_2_Vibration_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_flow_rate,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_FlowRate,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_yesterday_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_FM_Yest_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_last_month_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_LastMonth_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_lifetime_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_Lifetime_Flow_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_month_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_Monthly_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -tp_today_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_Today_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_1_vfd_faulted_alm,rr_facility,default,periodic,,,BIT,,,,0,Al0_TP1_VFD_Faulted,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, tp_1_vfd_frequency,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP1_VFD_Frequency_Setpoint,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, tp_1_vfd_output_current,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP1_VFD_Output_Current,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
tp_1_vfd_output_frequency,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP1_VFD_Output_Frequency,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, tp_1_vfd_output_voltage,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP1_VFD_Output_Voltage,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_1_vfd_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_Run_TP1_VFD,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_vfd_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_TP1_VFD_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_1_vibration,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_1_Vibration_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_1_vibration_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP_1_Vibration_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_a_winding_temp,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_2_A_Winding_Temp_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_2_b_winding_temp,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_2_B_Winding_Temp_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_2_c_winding_temp,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_2_C_Winding_Temp_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_2_charge_pump_fail_to_start_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_Charge_Pump_Failed_To_Start,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_charge_pump_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_TP2_Charge_Pump_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_discharge,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_2_Discharge_PSI_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_2_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP_2_Discharge_Input_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_flow,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP2_Flowrate,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_2_hi_a_winding_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_High_A_Winding_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, 
+tp_2_hi_a_winding_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_High_A_Winding_Temp_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_hi_a_winding_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP2_A_Winding_High,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_2_hi_b_winding_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_High_B_Winding_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_hi_b_winding_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_High_B_Winding_Temp_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_hi_b_winding_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP2_B_Winding_High,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_2_hi_c_winding_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_High_C_Winding_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_hi_c_winding_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_High_C_Winding_Temp_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_hi_c_winding_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP2_C_Winding_High,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_2_hi_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_High_Discharge_PSI,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_hi_discharge_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_High_Discharge_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_hi_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP2_High_Discharge_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_2_hi_inboard_temp_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_High_Inboard_Temp_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_hi_inboard_temp_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_High_Inboard_Temp_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_hi_inboard_temp_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP2_Inboard_High,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
+tp_2_hi_outboard_temp_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_High_Outboard_Temp_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_hi_outboard_temp_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_High_Outboard_Temp_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_hi_outboard_temp_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP2_Outboard_High,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_2_hi_vibration_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_High_Vibration_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_hi_vibration_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_High_Vibration_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_hi_vibration_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP2_High_Vibration_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_2_inboard_temp,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_2_Inboard_Temp_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_2_lifetime,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP2_T1,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_2_lo_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_Low_Discharge_PSI,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_lo_discharge_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_Low_Discharge_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_lo_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP2_Low_Discharge_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_2_lo_oil,rr_facility,default,periodic,,,BIT,,,,0,FBK_TP2_Low_Oil_Level,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_lo_oil_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_Low_Oil_Level_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_lo_suction_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_Low_Suction_Pressure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_master_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_Master_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, 
+tp_2_master_fault_clear_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_Master_Fault_Clear,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_oil_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_Oil_Level_Alarm_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_oil_cooler_bypass_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_Oil_Cooler_Bypass,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_oil_cooler_failed_to_start_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP2_Oil_Cooler_Failed_To_Start,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_oil_cooler_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_TP2_Oil_Cooler_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_outboard_temp,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_2_Outboard_Temp_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_2_pid_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_TP2_PID_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_pid_manual_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP2_PID_Manual_Freq,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_2_pid_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_TP2_PID_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_2_suction,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_2_Suction_PSI_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_2_suction_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP_2_Suction_Input_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_thrust_chamber_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP_2_Thrust_Chamber_Temp_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_thrust_chamber_temp,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_2_Thrust_Chamber_Temperature,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_2_vfd_faulted_alm,rr_facility,default,periodic,,,BIT,,,,0,Al0_TP2_VFD_Faulted,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, tp_2_vfd_frequency,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP2_VFD_Frequency_Setpoint,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
tp_2_vfd_output_current,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP2_VFD_Output_Current,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, tp_2_vfd_output_frequency,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP2_VFD_Output_Frequency,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, tp_2_vfd_output_voltage,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP2_VFD_Output_Voltage,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_2_vfd_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_Run_TP2_VFD,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_vfd_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_TP2_VFD_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_2_vibration,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_2_Vibration_Scaled,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_2_vibration_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_TP_2_Vibration_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +tp_flow_rate,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_FlowRate,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_last_month_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_LastMonth_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_lifetime_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_Lifetime_Flow_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_month_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_Monthly_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_today_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_Today_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +tp_yesterday_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_TP_FM_Yest_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_1_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW1_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_1_comms_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_Water_Well_1_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_1_comms_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW1_Communication_Check,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, 
+ww_1_control_power_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW1_Control_Power_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, ww_1_discharge,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW1_Discharge_PSI,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, ww_1_downhole,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW1_Downhole_PSI,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, ww_1_flow_rate,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW1_Flowmeter_FR,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_1_hand_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW1_Hand_Freq,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_1_hi_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW1_Hi_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_1_hi_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW1_Enable_Hi_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_1_hi_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW1_Hi_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_1_hi_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW1_Hi_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_1_hi_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW1_Enable_Hi_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_1_hi_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW1_Hi_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_1_hoa_in_manual_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW1_HOA_In_Manual,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, ww_1_last_month_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW1_Flowmeter_LastMonth_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_1_month_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW1_Flowmeter_Month_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_1_today_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW1_Flowmeter_Todays_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
ww_1_lifetime_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW1_Flowmeter_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_1_yesterday_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW1_Flowmeter_Yest_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_1_vfd_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW1_VFD_Frequency_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_1_lo_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW1_Lo_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_1_lo_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW1_Enable_Lo_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_1_lo_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW1_Lo_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_1_lo_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW1_Lo_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_1_lo_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW1_Enable_Lo_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_1_lo_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW1_Lo_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_1_lo_pip_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW1_Lo_PIP_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_1_lo_pip_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW1_Enable_Lo_PIP_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_1_lo_pip_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW1_Lo_PIP_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_1_manual_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW1_Manual_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_1_master_comm_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW1_Master_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_1_month_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW1_Flowmeter_Month_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
+ww_1_pid_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW1_PID_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_1_pid_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW1_PID_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_1_pond_level_start_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Start_WW1,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_1_pond_level_stop_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Stop_WW1,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_1_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW1_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_1_today_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW1_Flowmeter_Todays_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_1_vfd_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW1_VFD_Faulted,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, ww_1_vfd_current,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW1_VFD_Output_Current,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, ww_1_vfd_frequency,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW1_VFD_Output_Frequency,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_1_vfd_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW1_VFD_Frequency_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_1_vfd_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_WW1_VFD_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, ww_1_vfd_voltage,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW1_VFD_Output_Voltage,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_1_yesterday_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW1_Flowmeter_Yest_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_2_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW2_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_2_comms_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_Water_Well_2_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_2_comms_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW2_Communication_Check,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, 
+ww_2_control_power_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW2_Control_Power_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, ww_2_discharge,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW2_Discharge_PSI,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, ww_2_downhole,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW2_Downhole_PSI,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, ww_2_flow_rate,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW2_Flowmeter_FR,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_2_hand_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW2_Hand_Freq,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_2_hi_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW2_Hi_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_2_hi_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW2_Enable_Hi_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_2_hi_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW2_Hi_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_2_hi_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW2_Hi_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_2_hi_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW2_Enable_Hi_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_2_hi_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW2_Hi_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_2_hoa_in_manual_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW2_HOA_In_Manual,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, ww_2_last_month_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW2_Flowmeter_LastMonth_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_2_month_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW2_Flowmeter_Month_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_2_today_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW2_Flowmeter_Todays_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
ww_2_lifetime_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW2_Flowmeter_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_2_yesterday_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW2_Flowmeter_Yest_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_2_vfd_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW2_VFD_Frequency_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_2_lo_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW2_Lo_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_2_lo_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW2_Enable_Lo_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_2_lo_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW2_Lo_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_2_lo_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW2_Lo_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_2_lo_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW2_Enable_Lo_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_2_lo_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW2_Lo_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_2_lo_pip_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW2_Lo_PIP_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_2_lo_pip_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW2_Enable_Lo_PIP_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_2_lo_pip_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW2_Lo_PIP_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_2_manual_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW2_Manual_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_2_master_comm_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW2_Master_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_2_month_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW2_Flowmeter_Month_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
+ww_2_pid_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW2_PID_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_2_pid_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW2_PID_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_2_pond_level_start_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Start_WW2,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_2_pond_level_stop_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Stop_WW2,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_2_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW2_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_2_today_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW2_Flowmeter_Todays_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_2_vfd_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW2_VFD_Faulted,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, ww_2_vfd_current,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW2_VFD_Output_Current,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, ww_2_vfd_frequency,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW2_VFD_Output_Frequency,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_2_vfd_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW2_VFD_Frequency_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_2_vfd_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_WW2_VFD_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, ww_2_vfd_voltage,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW2_VFD_Output_Voltage,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_2_yesterday_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW2_Flowmeter_Yest_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_3_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW3_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_3_comms_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_Water_Well_3_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_3_comms_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW3_Communication_Check,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, 
+ww_3_control_power_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW3_Control_Power_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, ww_3_discharge,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW3_Discharge_PSI,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, ww_3_downhole,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW3_Downhole_PSI,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, ww_3_flow_rate,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW3_Flowmeter_FR,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_3_hand_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW3_Hand_Freq,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_3_hi_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW3_Hi_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_3_hi_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW3_Enable_Hi_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_3_hi_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW3_Hi_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_3_hi_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW3_Hi_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_3_hi_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW3_Enable_Hi_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_3_hi_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW3_Hi_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_3_hoa_in_manual_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW3_HOA_In_Manual,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, ww_3_last_month_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW3_Flowmeter_LastMonth_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_3_month_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW3_Flowmeter_Month_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_3_today_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW3_Flowmeter_Todays_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
ww_3_lifetime_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW3_Flowmeter_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_3_yesterday_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW3_Flowmeter_Yest_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_3_vfd_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW3_VFD_Frequency_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_3_lo_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW3_Lo_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_3_lo_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW3_Enable_Lo_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_3_lo_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW3_Lo_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_3_lo_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW3_Lo_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_3_lo_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW3_Enable_Lo_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_3_lo_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW3_Lo_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_3_lo_pip_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW3_Lo_PIP_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_3_lo_pip_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW3_Enable_Lo_PIP_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_3_lo_pip_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW3_Lo_PIP_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_3_manual_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW3_Manual_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_3_master_comm_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW3_Master_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_3_month_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW3_Flowmeter_Month_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
+ww_3_pid_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW3_PID_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_3_pid_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW3_PID_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_3_pond_level_start_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Start_WW3,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_3_pond_level_stop_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Stop_WW3,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_3_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW3_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_3_today_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW3_Flowmeter_Todays_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_3_vfd_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW3_VFD_Faulted,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, ww_3_vfd_current,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW3_VFD_Output_Current,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, ww_3_vfd_frequency,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW3_VFD_Output_Frequency,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_3_vfd_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW3_VFD_Frequency_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_3_vfd_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_WW3_VFD_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, ww_3_vfd_voltage,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW3_VFD_Output_Voltage,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_3_yesterday_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW3_Flowmeter_Yest_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_4_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW4_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_4_comms_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_Water_Well_4_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_4_comms_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW4_Communication_Check,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, 
+ww_4_control_power_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW4_Control_Power_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, ww_4_discharge,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW4_Discharge_PSI,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, ww_4_downhole,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW4_Downhole_PSI,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, ww_4_flow_rate,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW4_Flowmeter_FR,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_4_hand_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW4_Hand_Freq,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_4_hi_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW4_Hi_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_4_hi_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW4_Enable_Hi_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_4_hi_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW4_Hi_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_4_hi_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW4_Hi_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_4_hi_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW4_Enable_Hi_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_4_hi_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW4_Hi_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_4_hoa_in_manual_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW4_HOA_In_Manual,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, ww_4_last_month_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW4_Flowmeter_LastMonth_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_4_month_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW4_Flowmeter_Month_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_4_today_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW4_Flowmeter_Todays_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
ww_4_lifetime_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW4_Flowmeter_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_4_yesterday_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW4_Flowmeter_Yest_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_4_vfd_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW4_VFD_Frequency_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_4_lo_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW4_Lo_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_4_lo_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW4_Enable_Lo_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_4_lo_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW4_Lo_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_4_lo_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW4_Lo_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_4_lo_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW4_Enable_Lo_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_4_lo_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW4_Lo_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_4_lo_pip_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW4_Lo_PIP_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_4_lo_pip_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW4_Enable_Lo_PIP_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_4_lo_pip_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW4_Lo_PIP_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_4_manual_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW4_Manual_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_4_master_comm_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW4_Master_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_4_month_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW4_Flowmeter_Month_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
+ww_4_pid_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW4_PID_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_4_pid_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW4_PID_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_4_pond_level_start_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Start_WW4,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_4_pond_level_stop_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Stop_WW4,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_4_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW4_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_4_today_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW4_Flowmeter_Todays_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_4_vfd_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW4_VFD_Faulted,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, ww_4_vfd_current,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW4_VFD_Output_Current,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, ww_4_vfd_frequency,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW4_VFD_Output_Frequency,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_4_vfd_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW4_VFD_Frequency_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_4_vfd_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_WW4_VFD_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, ww_4_vfd_voltage,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW4_VFD_Output_Voltage,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_4_yesterday_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW4_Flowmeter_Yest_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_5_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW5_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_5_comms_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_Water_Well_5_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_5_comms_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW5_Communication_Check,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, 
+ww_5_control_power_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW5_Control_Power_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, ww_5_discharge,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW5_Discharge_PSI,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, ww_5_downhole,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW5_Downhole_PSI,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, ww_5_flow_rate,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW5_Flowmeter_FR,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_5_hand_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW5_Hand_Freq,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_5_hi_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW5_Hi_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_5_hi_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW5_Enable_Hi_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_5_hi_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW5_Hi_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_5_hi_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW5_Hi_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_5_hi_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW5_Enable_Hi_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_5_hi_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW5_Hi_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_5_hoa_in_manual_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW5_HOA_In_Manual,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, ww_5_last_month_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW5_Flowmeter_LastMonth_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_5_month_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW5_Flowmeter_Month_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_5_today_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW5_Flowmeter_Todays_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
ww_5_lifetime_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW5_Flowmeter_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_5_yesterday_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW5_Flowmeter_Yest_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_5_vfd_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW5_VFD_Frequency_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_5_lo_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW5_Lo_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_5_lo_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW5_Enable_Lo_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_5_lo_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW5_Lo_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_5_lo_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW5_Lo_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_5_lo_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW5_Enable_Lo_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_5_lo_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW5_Lo_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_5_lo_pip_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW5_Lo_PIP_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_5_lo_pip_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW5_Enable_Lo_PIP_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_5_lo_pip_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW5_Lo_PIP_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_5_manual_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW5_Manual_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_5_master_comm_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW5_Master_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_5_month_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW5_Flowmeter_Month_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
+ww_5_pid_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW5_PID_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_5_pid_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW5_PID_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_5_pond_level_start_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Start_WW5,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_5_pond_level_stop_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Stop_WW5,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_5_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW5_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_5_today_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW5_Flowmeter_Todays_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_5_vfd_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW5_VFD_Faulted,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, ww_5_vfd_current,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW5_VFD_Output_Current,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, ww_5_vfd_frequency,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW5_VFD_Output_Frequency,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_5_vfd_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW5_VFD_Frequency_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_5_vfd_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_WW5_VFD_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, ww_5_vfd_voltage,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW5_VFD_Output_Voltage,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_5_yesterday_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW5_Flowmeter_Yest_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_6_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW6_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_6_comms_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_Water_Well_6_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_6_comms_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW6_Communication_Check,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, 
+ww_6_control_power_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW6_Control_Power_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, ww_6_discharge,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW6_Discharge_PSI,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, ww_6_downhole,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW6_Downhole_PSI,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, ww_6_flow_rate,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW6_Flowmeter_FR,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_6_hand_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW6_Hand_Freq,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_6_hi_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW6_Hi_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_6_hi_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW6_Enable_Hi_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_6_hi_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW6_Hi_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_6_hi_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW6_Hi_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_6_hi_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW6_Enable_Hi_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_6_hi_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW6_Hi_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_6_hoa_in_manual_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW6_HOA_In_Manual,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, ww_6_last_month_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW6_Flowmeter_LastMonth_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_6_month_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW6_Flowmeter_Month_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_6_today_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW6_Flowmeter_Todays_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
ww_6_lifetime_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW6_Flowmeter_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_6_yesterday_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW6_Flowmeter_Yest_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_6_vfd_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW6_VFD_Frequency_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_6_lo_discharge_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW6_Lo_Discharge_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_6_lo_discharge_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW6_Enable_Lo_Disch_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_6_lo_discharge_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW6_Lo_Disch_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_6_lo_flow_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW6_Lo_Flow_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_6_lo_flow_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW6_Enable_Lo_Flow_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_6_lo_flow_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW6_Lo_Flow_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_6_lo_pip_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW6_Lo_PIP_Alarm,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_6_lo_pip_enable_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW6_Enable_Lo_PIP_Alarm,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_6_lo_pip_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW6_Lo_PIP_Alarm,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_6_manual_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW6_Manual_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_6_master_comm_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW6_Master_Communication_Failure,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_6_month_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW6_Flowmeter_Month_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, 
+ww_6_pid_auto_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW6_PID_Auto,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_6_pid_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_WW6_PID_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_6_pond_level_start_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Start_WW6,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_6_pond_level_stop_spt,rr_facility,default,periodic,,,FLOAT,,,,,SPT_Pond_Level_Stop_WW6,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_6_run_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW6_Run,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_6_today_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW6_Flowmeter_Todays_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_6_vfd_alm,rr_facility,default,periodic,,,BIT,,,,0,AL0_WW6_VFD_Faulted,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, ww_6_vfd_current,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW6_VFD_Output_Current,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, ww_6_vfd_frequency,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW6_VFD_Output_Frequency,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, -ww_6_vfd_voltage,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW6_VFD_Output_Voltage,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, \ No newline at end of file +ww_6_vfd_frequency_spt,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW6_VFD_Frequency_Setpoint,2,,,rw,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_6_vfd_running,rr_facility,default,periodic,,,BIT,,,,0,FBK_WW6_VFD_Running,,,,ro,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, +ww_6_vfd_voltage,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW6_VFD_Output_Voltage,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_6_yesterday_total,rr_facility,default,periodic,,,FLOAT,,,,,Val_WW6_Flowmeter_Yest_Total,2,,,ro,,,none,,,,,,,,,,,,,,,1,,,,,,,,,0, +ww_all_start_stop_cmd,rr_facility,default,periodic,,,BIT,,,,0,CMD_WW_Start_Stop_Together,,,,rw,,,none,,,,,,,,,,,,,0,,1,,,,,,,,,0, \ No newline at end of file diff --git a/code snippets/getPLCData.ipynb b/code 
snippets/getPLCData.ipynb index 499880a..d25837b 100644 --- a/code snippets/getPLCData.ipynb +++ b/code snippets/getPLCData.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": 2, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ @@ -12,18 +12,41 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ - "ip_address = \"166.195.196.165\"" + "ip_address = \"107.84.248.90\" # \"ngrok.iot.inhandnetworks.com:3021\"" ] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 5, "metadata": {}, - "outputs": [], + "outputs": [ + { + "ename": "ResponseError", + "evalue": "failed to get attribute list", + "output_type": "error", + "traceback": [ + "\u001b[31m---------------------------------------------------------------------------\u001b[39m", + "\u001b[31mResponseError\u001b[39m Traceback (most recent call last)", + "\u001b[36mFile \u001b[39m\u001b[32m~/miniconda3/envs/pycomm/lib/python3.13/site-packages/pycomm3/logix_driver.py:499\u001b[39m, in \u001b[36mLogixDriver._get_instance_attribute_list_service\u001b[39m\u001b[34m(self, program)\u001b[39m\n\u001b[32m 498\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m response:\n\u001b[32m--> \u001b[39m\u001b[32m499\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m ResponseError(\n\u001b[32m 500\u001b[39m \u001b[33mf\u001b[39m\u001b[33m\"\u001b[39m\u001b[33msend_unit_data returned not valid data - \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mresponse.error\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m\"\u001b[39m\n\u001b[32m 501\u001b[39m )\n\u001b[32m 503\u001b[39m last_instance = \u001b[38;5;28mself\u001b[39m._parse_instance_attribute_list(response, tag_list)\n", + "\u001b[31mResponseError\u001b[39m: send_unit_data returned not valid data - Object state conflict", + "\nThe above exception was the direct cause of the following exception:\n", + "\u001b[31mResponseError\u001b[39m Traceback 
(most recent call last)", + "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[5]\u001b[39m\u001b[32m, line 1\u001b[39m\n\u001b[32m----> \u001b[39m\u001b[32m1\u001b[39m \u001b[38;5;28;01mwith\u001b[39;00m LogixDriver(ip_address) \u001b[38;5;28;01mas\u001b[39;00m plc:\n\u001b[32m 2\u001b[39m \u001b[38;5;66;03m#info = plc.get_plc_info()\u001b[39;00m\n\u001b[32m 3\u001b[39m plctags = plc.get_tag_list()\n\u001b[32m 4\u001b[39m \u001b[38;5;66;03m#print(info)\u001b[39;00m\n", + "\u001b[36mFile \u001b[39m\u001b[32m~/miniconda3/envs/pycomm/lib/python3.13/site-packages/pycomm3/cip_driver.py:144\u001b[39m, in \u001b[36mCIPDriver.__enter__\u001b[39m\u001b[34m(self)\u001b[39m\n\u001b[32m 143\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34m__enter__\u001b[39m(\u001b[38;5;28mself\u001b[39m):\n\u001b[32m--> \u001b[39m\u001b[32m144\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43mopen\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 145\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\n", + "\u001b[36mFile \u001b[39m\u001b[32m~/miniconda3/envs/pycomm/lib/python3.13/site-packages/pycomm3/logix_driver.py:165\u001b[39m, in \u001b[36mLogixDriver.open\u001b[39m\u001b[34m(self)\u001b[39m\n\u001b[32m 163\u001b[39m ret = \u001b[38;5;28msuper\u001b[39m().open()\n\u001b[32m 164\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m ret:\n\u001b[32m--> \u001b[39m\u001b[32m165\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43m_initialize_driver\u001b[49m\u001b[43m(\u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43m_init_args\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 166\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m ret\n", + "\u001b[36mFile \u001b[39m\u001b[32m~/miniconda3/envs/pycomm/lib/python3.13/site-packages/pycomm3/logix_driver.py:192\u001b[39m, in 
\u001b[36mLogixDriver._initialize_driver\u001b[39m\u001b[34m(self, init_tags, init_program_tags)\u001b[39m\n\u001b[32m 187\u001b[39m \u001b[38;5;28mself\u001b[39m._cfg[\u001b[33m\"\u001b[39m\u001b[33mcip_path\u001b[39m\u001b[33m\"\u001b[39m].pop(\n\u001b[32m 188\u001b[39m -\u001b[32m1\u001b[39m\n\u001b[32m 189\u001b[39m ) \u001b[38;5;66;03m# strip off backplane/0 segment, not used for these processors\u001b[39;00m\n\u001b[32m 191\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m init_tags:\n\u001b[32m--> \u001b[39m\u001b[32m192\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43mget_tag_list\u001b[49m\u001b[43m(\u001b[49m\u001b[43mprogram\u001b[49m\u001b[43m=\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43m*\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mif\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43minit_program_tags\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01melse\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\n\u001b[32m 194\u001b[39m \u001b[38;5;28mself\u001b[39m.__log.info(\u001b[33m\"\u001b[39m\u001b[33mInitialization complete.\u001b[39m\u001b[33m\"\u001b[39m)\n", + "\u001b[36mFile \u001b[39m\u001b[32m~/miniconda3/envs/pycomm/lib/python3.13/site-packages/pycomm3/cip_driver.py:100\u001b[39m, in \u001b[36mwith_forward_open..wrapped\u001b[39m\u001b[34m(self, *args, **kwargs)\u001b[39m\n\u001b[32m 98\u001b[39m msg = \u001b[33mf\u001b[39m\u001b[33m\"\u001b[39m\u001b[33mTarget did not connected. 
\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mfunc.\u001b[34m__name__\u001b[39m\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m will not be executed.\u001b[39m\u001b[33m\"\u001b[39m\n\u001b[32m 99\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m ResponseError(msg)\n\u001b[32m--> \u001b[39m\u001b[32m100\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[36mFile \u001b[39m\u001b[32m~/miniconda3/envs/pycomm/lib/python3.13/site-packages/pycomm3/logix_driver.py:422\u001b[39m, in \u001b[36mLogixDriver.get_tag_list\u001b[39m\u001b[34m(self, program, cache)\u001b[39m\n\u001b[32m 420\u001b[39m \u001b[38;5;28mself\u001b[39m.__log.info(\u001b[33m\"\u001b[39m\u001b[33mStarting tag list upload...\u001b[39m\u001b[33m\"\u001b[39m)\n\u001b[32m 421\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m program == \u001b[33m\"\u001b[39m\u001b[33m*\u001b[39m\u001b[33m\"\u001b[39m:\n\u001b[32m--> \u001b[39m\u001b[32m422\u001b[39m tags = \u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43m_get_tag_list\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 423\u001b[39m \u001b[38;5;28;01mfor\u001b[39;00m prog \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m._info[\u001b[33m\"\u001b[39m\u001b[33mprograms\u001b[39m\u001b[33m\"\u001b[39m]:\n\u001b[32m 424\u001b[39m tags += \u001b[38;5;28mself\u001b[39m._get_tag_list(prog)\n", + "\u001b[36mFile \u001b[39m\u001b[32m~/miniconda3/envs/pycomm/lib/python3.13/site-packages/pycomm3/logix_driver.py:438\u001b[39m, in \u001b[36mLogixDriver._get_tag_list\u001b[39m\u001b[34m(self, program)\u001b[39m\n\u001b[32m 436\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m 
\u001b[39m\u001b[34m_get_tag_list\u001b[39m(\u001b[38;5;28mself\u001b[39m, program=\u001b[38;5;28;01mNone\u001b[39;00m):\n\u001b[32m 437\u001b[39m \u001b[38;5;28mself\u001b[39m.__log.info(\u001b[33mf\u001b[39m\u001b[33m'\u001b[39m\u001b[33mBeginning upload of \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mprogram\u001b[38;5;250m \u001b[39m\u001b[38;5;129;01mor\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[33m\"\u001b[39m\u001b[33mcontroller\u001b[39m\u001b[33m\"\u001b[39m\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m tags...\u001b[39m\u001b[33m'\u001b[39m)\n\u001b[32m--> \u001b[39m\u001b[32m438\u001b[39m all_tags = \u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43m_get_instance_attribute_list_service\u001b[49m\u001b[43m(\u001b[49m\u001b[43mprogram\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 439\u001b[39m \u001b[38;5;28mself\u001b[39m.__log.info(\u001b[33mf\u001b[39m\u001b[33m'\u001b[39m\u001b[33mCompleted upload of \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mprogram\u001b[38;5;250m \u001b[39m\u001b[38;5;129;01mor\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[33m\"\u001b[39m\u001b[33mcontroller\u001b[39m\u001b[33m\"\u001b[39m\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m tags\u001b[39m\u001b[33m'\u001b[39m)\n\u001b[32m 440\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m._isolate_user_tags(all_tags, program)\n", + "\u001b[36mFile \u001b[39m\u001b[32m~/miniconda3/envs/pycomm/lib/python3.13/site-packages/pycomm3/logix_driver.py:511\u001b[39m, in \u001b[36mLogixDriver._get_instance_attribute_list_service\u001b[39m\u001b[34m(self, program)\u001b[39m\n\u001b[32m 508\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m tag_list\n\u001b[32m 510\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m err:\n\u001b[32m--> \u001b[39m\u001b[32m511\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m ResponseError(\u001b[33m\"\u001b[39m\u001b[33mfailed to get attribute 
list\u001b[39m\u001b[33m\"\u001b[39m) \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01merr\u001b[39;00m\n", + "\u001b[31mResponseError\u001b[39m: failed to get attribute list" + ] + } + ], "source": [ "with LogixDriver(ip_address) as plc:\n", " #info = plc.get_plc_info()\n", diff --git a/code snippets/tag_dump.json b/code snippets/tag_dump.json index 39a67ba..d2e68be 100644 --- a/code snippets/tag_dump.json +++ b/code snippets/tag_dump.json @@ -5,9 +5,9 @@ "alias": true, "instance_id": 1, "symbol_address": 1044, - "symbol_object_address": 1077571048, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, @@ -24,9 +24,9 @@ "alias": true, "instance_id": 2, "symbol_address": 1045, - "symbol_object_address": 1077571048, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, @@ -43,9 +43,9 @@ "alias": true, "instance_id": 3, "symbol_address": 1046, - "symbol_object_address": 1077571048, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, @@ -62,9 +62,9 @@ "alias": true, "instance_id": 4, "symbol_address": 1047, - "symbol_object_address": 1077571048, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, @@ -81,9 +81,9 @@ "alias": true, "instance_id": 5, "symbol_address": 1048, - "symbol_object_address": 1077571048, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, @@ -100,9 +100,9 @@ "alias": true, "instance_id": 6, "symbol_address": 1049, - "symbol_object_address": 1077571048, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": 
"Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, @@ -119,9 +119,66 @@ "alias": true, "instance_id": 7, "symbol_address": 1050, - "symbol_object_address": 1077571048, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, + "tag_type": "atomic" + }, + "_IO_EM_DO_07": { + "tag_name": "_IO_EM_DO_07", + "dim": 0, + "alias": true, + "instance_id": 8, + "symbol_address": 1051, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, + "tag_type": "atomic" + }, + "_IO_EM_DO_08": { + "tag_name": "_IO_EM_DO_08", + "dim": 0, + "alias": true, + "instance_id": 9, + "symbol_address": 1052, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, + "tag_type": "atomic" + }, + "_IO_EM_DO_09": { + "tag_name": "_IO_EM_DO_09", + "dim": 0, + "alias": true, + "instance_id": 10, + "symbol_address": 1053, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", "dimensions": [ 0, 0, @@ -136,11 +193,11 @@ "tag_name": "_IO_EM_DI_00", "dim": 0, "alias": true, - "instance_id": 8, - "symbol_address": 1051, - "symbol_object_address": 1077571048, + "instance_id": 11, + "symbol_address": 1054, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, @@ -155,11 +212,11 @@ "tag_name": "_IO_EM_DI_01", "dim": 0, "alias": true, - "instance_id": 9, - "symbol_address": 1052, - "symbol_object_address": 1077571048, + "instance_id": 12, + "symbol_address": 1055, + 
"symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, @@ -174,11 +231,11 @@ "tag_name": "_IO_EM_DI_02", "dim": 0, "alias": true, - "instance_id": 10, - "symbol_address": 1053, - "symbol_object_address": 1077571048, + "instance_id": 13, + "symbol_address": 1056, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, @@ -193,11 +250,11 @@ "tag_name": "_IO_EM_DI_03", "dim": 0, "alias": true, - "instance_id": 11, - "symbol_address": 1054, - "symbol_object_address": 1077571048, + "instance_id": 14, + "symbol_address": 1057, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, @@ -212,11 +269,11 @@ "tag_name": "_IO_EM_DI_04", "dim": 0, "alias": true, - "instance_id": 12, - "symbol_address": 1055, - "symbol_object_address": 1077571048, + "instance_id": 15, + "symbol_address": 1058, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, @@ -231,11 +288,11 @@ "tag_name": "_IO_EM_DI_05", "dim": 0, "alias": true, - "instance_id": 13, - "symbol_address": 1056, - "symbol_object_address": 1077571048, + "instance_id": 16, + "symbol_address": 1059, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, @@ -250,11 +307,11 @@ "tag_name": "_IO_EM_DI_06", "dim": 0, "alias": true, - "instance_id": 14, - "symbol_address": 1057, - "symbol_object_address": 1077571048, + "instance_id": 17, + "symbol_address": 1060, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, @@ -269,11 +326,11 @@ "tag_name": "_IO_EM_DI_07", "dim": 0, 
"alias": true, - "instance_id": 15, - "symbol_address": 1058, - "symbol_object_address": 1077571048, + "instance_id": 18, + "symbol_address": 1061, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, @@ -288,11 +345,11 @@ "tag_name": "_IO_EM_DI_08", "dim": 0, "alias": true, - "instance_id": 16, - "symbol_address": 1059, - "symbol_object_address": 1077571048, + "instance_id": 19, + "symbol_address": 1062, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, @@ -307,11 +364,11 @@ "tag_name": "_IO_EM_DI_09", "dim": 0, "alias": true, - "instance_id": 17, - "symbol_address": 1060, - "symbol_object_address": 1077571048, + "instance_id": 20, + "symbol_address": 1063, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, @@ -326,11 +383,11 @@ "tag_name": "_IO_EM_DI_10", "dim": 0, "alias": true, - "instance_id": 18, - "symbol_address": 1061, - "symbol_object_address": 1077571048, + "instance_id": 21, + "symbol_address": 1064, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, @@ -345,11 +402,11 @@ "tag_name": "_IO_EM_DI_11", "dim": 0, "alias": true, - "instance_id": 19, - "symbol_address": 1062, - "symbol_object_address": 1077571048, + "instance_id": 22, + "symbol_address": 1065, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, @@ -360,445 +417,503 @@ "bit_position": 0, "tag_type": "atomic" }, - "_IO_EM_AI_00": { - "tag_name": "_IO_EM_AI_00", - "dim": 0, - "alias": true, - "instance_id": 20, - "symbol_address": 1064, - "symbol_object_address": 1077571048, - "software_control": 0, - 
"external_access": "Unknown", - "dimensions": [ - 0, - 0, - 0 - ], - "data_type": "UINT", - "data_type_name": "UINT", - "tag_type": "atomic" - }, - "_IO_EM_AI_01": { - "tag_name": "_IO_EM_AI_01", - "dim": 0, - "alias": true, - "instance_id": 21, - "symbol_address": 1066, - "symbol_object_address": 1077571048, - "software_control": 0, - "external_access": "Unknown", - "dimensions": [ - 0, - 0, - 0 - ], - "data_type": "UINT", - "data_type_name": "UINT", - "tag_type": "atomic" - }, - "_IO_EM_AI_02": { - "tag_name": "_IO_EM_AI_02", - "dim": 0, - "alias": true, - "instance_id": 22, - "symbol_address": 1068, - "symbol_object_address": 1077571048, - "software_control": 0, - "external_access": "Unknown", - "dimensions": [ - 0, - 0, - 0 - ], - "data_type": "UINT", - "data_type_name": "UINT", - "tag_type": "atomic" - }, - "_IO_EM_AI_03": { - "tag_name": "_IO_EM_AI_03", + "_IO_EM_DI_12": { + "tag_name": "_IO_EM_DI_12", "dim": 0, "alias": true, "instance_id": 23, - "symbol_address": 1070, - "symbol_object_address": 1077571048, + "symbol_address": 1066, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, 0 ], - "data_type": "UINT", - "data_type_name": "UINT", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "_IO_EM_AO_00": { - "tag_name": "_IO_EM_AO_00", + "_IO_EM_DI_13": { + "tag_name": "_IO_EM_DI_13", "dim": 0, "alias": true, "instance_id": 24, - "symbol_address": 1072, - "symbol_object_address": 1077571048, + "symbol_address": 1067, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, 0 ], - "data_type": "UINT", - "data_type_name": "UINT", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "_IO_P1_AI_00": { - "tag_name": "_IO_P1_AI_00", + "_IO_X1_AI_00": { + "tag_name": "_IO_X1_AI_00", 
"dim": 0, "alias": true, "instance_id": 25, - "symbol_address": 1074, - "symbol_object_address": 1077571048, + "symbol_address": 1068, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, 0 ], - "data_type": "UINT", - "data_type_name": "UINT", + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - "_IO_P1_AI_01": { - "tag_name": "_IO_P1_AI_01", + "_IO_X1_AI_01": { + "tag_name": "_IO_X1_AI_01", "dim": 0, "alias": true, "instance_id": 26, - "symbol_address": 1076, - "symbol_object_address": 1077571048, + "symbol_address": 1070, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, 0 ], - "data_type": "UINT", - "data_type_name": "UINT", + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - "Pump_Run": { - "tag_name": "Pump_Run", + "_IO_X1_AI_02": { + "tag_name": "_IO_X1_AI_02", "dim": 0, "alias": true, "instance_id": 27, - "symbol_address": 1116, - "symbol_object_address": 1077571048, + "symbol_address": 1072, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, 0 ], - "data_type": "BOOL", - "data_type_name": "BOOL", - "bit_position": 0, + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - "Raw_Hand_Input": { - "tag_name": "Raw_Hand_Input", + "_IO_X1_AI_03": { + "tag_name": "_IO_X1_AI_03", "dim": 0, "alias": true, "instance_id": 28, - "symbol_address": 1117, - "symbol_object_address": 1077571048, + "symbol_address": 1074, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, 0 ], - "data_type": "BOOL", - "data_type_name": "BOOL", - "bit_position": 0, + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - 
"Raw_Auto_Input": { - "tag_name": "Raw_Auto_Input", + "_IO_X1_AI_04": { + "tag_name": "_IO_X1_AI_04", "dim": 0, "alias": true, "instance_id": 29, - "symbol_address": 1118, - "symbol_object_address": 1077571048, + "symbol_address": 1076, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, 0 ], - "data_type": "BOOL", - "data_type_name": "BOOL", - "bit_position": 0, + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - "Raw_Run_Status": { - "tag_name": "Raw_Run_Status", + "_IO_X1_AI_05": { + "tag_name": "_IO_X1_AI_05", "dim": 0, "alias": true, "instance_id": 30, - "symbol_address": 1119, - "symbol_object_address": 1077571048, + "symbol_address": 1078, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, 0 ], - "data_type": "BOOL", - "data_type_name": "BOOL", - "bit_position": 0, + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - "Raw_Run_Indication": { - "tag_name": "Raw_Run_Indication", + "_IO_X1_AI_06": { + "tag_name": "_IO_X1_AI_06", "dim": 0, "alias": true, "instance_id": 31, - "symbol_address": 1120, - "symbol_object_address": 1077571048, + "symbol_address": 1080, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, 0 ], - "data_type": "BOOL", - "data_type_name": "BOOL", - "bit_position": 0, + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - "Raw_Local_Start": { - "tag_name": "Raw_Local_Start", + "_IO_X1_AI_07": { + "tag_name": "_IO_X1_AI_07", "dim": 0, "alias": true, "instance_id": 32, - "symbol_address": 1121, - "symbol_object_address": 1077571048, + "symbol_address": 1082, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", 
"dimensions": [ 0, 0, 0 ], - "data_type": "BOOL", - "data_type_name": "BOOL", - "bit_position": 0, + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - "CMD_Cloud_Control": { - "tag_name": "CMD_Cloud_Control", + "_IO_X1_ST_00": { + "tag_name": "_IO_X1_ST_00", "dim": 0, "alias": true, "instance_id": 33, - "symbol_address": 1122, - "symbol_object_address": 1077571048, + "symbol_address": 1084, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, 0 ], - "data_type": "BOOL", - "data_type_name": "BOOL", - "bit_position": 0, + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - "Daily_GOP": { - "tag_name": "Daily_GOP", + "_IO_X1_ST_01": { + "tag_name": "_IO_X1_ST_01", "dim": 0, "alias": true, "instance_id": 34, - "symbol_address": 1123, - "symbol_object_address": 1077571048, + "symbol_address": 1086, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, 0 ], - "data_type": "BOOL", - "data_type_name": "BOOL", - "bit_position": 0, + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - "Monthly_GOP": { - "tag_name": "Monthly_GOP", + "_IO_X1_ST_02": { + "tag_name": "_IO_X1_ST_02", "dim": 0, "alias": true, "instance_id": 35, - "symbol_address": 1124, - "symbol_object_address": 1077571048, + "symbol_address": 1088, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, 0 ], - "data_type": "BOOL", - "data_type_name": "BOOL", - "bit_position": 0, + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - "SPT_Flow_Meter_Unit": { - "tag_name": "SPT_Flow_Meter_Unit", + "_IO_X1_ST_03": { + "tag_name": "_IO_X1_ST_03", "dim": 0, "alias": true, "instance_id": 36, - "symbol_address": 1125, - "symbol_object_address": 
1077571048, + "symbol_address": 1090, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, 0 ], - "data_type": "BOOL", - "data_type_name": "BOOL", - "bit_position": 0, + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - "Raw_Overload_Status": { - "tag_name": "Raw_Overload_Status", + "_IO_X1_ST_04": { + "tag_name": "_IO_X1_ST_04", "dim": 0, "alias": true, "instance_id": 37, - "symbol_address": 1126, - "symbol_object_address": 1077571048, + "symbol_address": 1092, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", "dimensions": [ 0, 0, 0 ], - "data_type": "BOOL", - "data_type_name": "BOOL", - "bit_position": 0, + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - "Test_GOP": { - "tag_name": "Test_GOP", + "_IO_X2_AO_00": { + "tag_name": "_IO_X2_AO_00", "dim": 0, "alias": true, "instance_id": 38, - "symbol_address": 1127, - "symbol_object_address": 1077571048, + "symbol_address": 1094, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "BOOL", - "data_type_name": "BOOL", - "bit_position": 0, + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - "Pump_Signal_Run": { - "tag_name": "Pump_Signal_Run", + "_IO_X2_AO_01": { + "tag_name": "_IO_X2_AO_01", "dim": 0, "alias": true, "instance_id": 39, - "symbol_address": 1128, - "symbol_object_address": 1077571048, + "symbol_address": 1096, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "BOOL", - "data_type_name": "BOOL", - "bit_position": 0, + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - "Test_Daily_GOP": { - "tag_name": 
"Test_Daily_GOP", + "_IO_X2_AO_02": { + "tag_name": "_IO_X2_AO_02", "dim": 0, "alias": true, "instance_id": 40, - "symbol_address": 1129, - "symbol_object_address": 1077571048, + "symbol_address": 1098, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "BOOL", - "data_type_name": "BOOL", - "bit_position": 0, + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - "FlowRateSel": { - "tag_name": "FlowRateSel", + "_IO_X2_AO_03": { + "tag_name": "_IO_X2_AO_03", "dim": 0, "alias": true, "instance_id": 41, - "symbol_address": 1130, - "symbol_object_address": 1077571048, + "symbol_address": 1100, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "BOOL", - "data_type_name": "BOOL", - "bit_position": 0, + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - "FlowRateSwitch": { - "tag_name": "FlowRateSwitch", + "_IO_X2_CO_00": { + "tag_name": "_IO_X2_CO_00", "dim": 0, "alias": true, "instance_id": 42, - "symbol_address": 1131, - "symbol_object_address": 1077571048, + "symbol_address": 1102, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "BOOL", - "data_type_name": "BOOL", - "bit_position": 0, + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - "MasterTotalizerReset": { - "tag_name": "MasterTotalizerReset", + "_IO_X2_ST_00": { + "tag_name": "_IO_X2_ST_00", "dim": 0, "alias": true, "instance_id": 43, - "symbol_address": 1132, - "symbol_object_address": 1077571048, + "symbol_address": 1104, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read Only", + "dimensions": [ + 0, + 0, + 0 + ], + 
"data_type": "INT", + "data_type_name": "INT", + "tag_type": "atomic" + }, + "_IO_X2_ST_01": { + "tag_name": "_IO_X2_ST_01", + "dim": 0, + "alias": true, + "instance_id": 44, + "symbol_address": 1106, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read Only", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "INT", + "data_type_name": "INT", + "tag_type": "atomic" + }, + "_IO_X2_ST_02": { + "tag_name": "_IO_X2_ST_02", + "dim": 0, + "alias": true, + "instance_id": 45, + "symbol_address": 1108, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read Only", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "INT", + "data_type_name": "INT", + "tag_type": "atomic" + }, + "_IO_X2_ST_03": { + "tag_name": "_IO_X2_ST_03", + "dim": 0, + "alias": true, + "instance_id": 46, + "symbol_address": 1110, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read Only", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "INT", + "data_type_name": "INT", + "tag_type": "atomic" + }, + "_IO_X2_ST_04": { + "tag_name": "_IO_X2_ST_04", + "dim": 0, + "alias": true, + "instance_id": 47, + "symbol_address": 1112, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read Only", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "INT", + "data_type_name": "INT", + "tag_type": "atomic" + }, + "_IO_X2_ST_05": { + "tag_name": "_IO_X2_ST_05", + "dim": 0, + "alias": true, + "instance_id": 48, + "symbol_address": 1114, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read Only", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "INT", + "data_type_name": "INT", + "tag_type": "atomic" + }, + "_IO_X2_ST_06": { + "tag_name": "_IO_X2_ST_06", + "dim": 0, + "alias": true, + "instance_id": 49, + "symbol_address": 1116, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read Only", + 
"dimensions": [ + 0, + 0, + 0 + ], + "data_type": "INT", + "data_type_name": "INT", + "tag_type": "atomic" + }, + "Raw_Power_Supply_Status": { + "tag_name": "Raw_Power_Supply_Status", + "dim": 0, + "alias": true, + "instance_id": 50, + "symbol_address": 1196, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", "dimensions": [ 0, 0, @@ -809,807 +924,712 @@ "bit_position": 0, "tag_type": "atomic" }, - "Raw_Flow_Meter": { - "tag_name": "Raw_Flow_Meter", - "dim": 0, - "alias": true, - "instance_id": 44, - "symbol_address": 1134, - "symbol_object_address": 1077571048, - "software_control": 0, - "external_access": "Unknown", - "dimensions": [ - 0, - 0, - 0 - ], - "data_type": "UINT", - "data_type_name": "UINT", - "tag_type": "atomic" - }, - "Raw_Pressure_Transducer": { - "tag_name": "Raw_Pressure_Transducer", - "dim": 0, - "alias": true, - "instance_id": 45, - "symbol_address": 1136, - "symbol_object_address": 1077571048, - "software_control": 0, - "external_access": "Unknown", - "dimensions": [ - 0, - 0, - 0 - ], - "data_type": "UINT", - "data_type_name": "UINT", - "tag_type": "atomic" - }, - "MB_BatteyStatus": { - "tag_name": "MB_BatteyStatus", - "dim": 0, - "alias": true, - "instance_id": 46, - "symbol_address": 1138, - "symbol_object_address": 1077571048, - "software_control": 0, - "external_access": "Unknown", - "dimensions": [ - 0, - 0, - 0 - ], - "data_type": "UINT", - "data_type_name": "UINT", - "tag_type": "atomic" - }, - "Contract_Hour": { - "tag_name": "Contract_Hour", - "dim": 0, - "alias": true, - "instance_id": 47, - "symbol_address": 1140, - "symbol_object_address": 1077571048, - "software_control": 0, - "external_access": "Unknown", - "dimensions": [ - 0, - 0, - 0 - ], - "data_type": "UINT", - "data_type_name": "UINT", - "tag_type": "atomic" - }, - "Zero": { - "tag_name": "Zero", - "dim": 0, - "alias": true, - "instance_id": 48, - "symbol_address": 1148, - "symbol_object_address": 1077571048, - 
"software_control": 0, - "external_access": "Unknown", - "dimensions": [ - 0, - 0, - 0 - ], - "data_type": "REAL", - "data_type_name": "REAL", - "tag_type": "atomic" - }, - "Scaled_Flow_Meter": { - "tag_name": "Scaled_Flow_Meter", - "dim": 0, - "alias": true, - "instance_id": 49, - "symbol_address": 1152, - "symbol_object_address": 1077571048, - "software_control": 0, - "external_access": "Unknown", - "dimensions": [ - 0, - 0, - 0 - ], - "data_type": "REAL", - "data_type_name": "REAL", - "tag_type": "atomic" - }, - "Scaled_Pressure_Transducer": { - "tag_name": "Scaled_Pressure_Transducer", - "dim": 0, - "alias": true, - "instance_id": 50, - "symbol_address": 1156, - "symbol_object_address": 1077571048, - "software_control": 0, - "external_access": "Unknown", - "dimensions": [ - 0, - 0, - 0 - ], - "data_type": "REAL", - "data_type_name": "REAL", - "tag_type": "atomic" - }, - "Lifetime_Flow_Meter_Gal": { - "tag_name": "Lifetime_Flow_Meter_Gal", + "Raw_UPS_Status": { + "tag_name": "Raw_UPS_Status", "dim": 0, "alias": true, "instance_id": 51, - "symbol_address": 1160, - "symbol_object_address": 1077571048, + "symbol_address": 1197, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "Totalizer_FM_Yesterday_Total_Gal": { - "tag_name": "Totalizer_FM_Yesterday_Total_Gal", + "CMD_Lights": { + "tag_name": "CMD_Lights", "dim": 0, "alias": true, "instance_id": 52, - "symbol_address": 1164, - "symbol_object_address": 1077571048, + "symbol_address": 1198, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, 
"tag_type": "atomic" }, - "Totalizer_FM_Current_Day_Total_Gal": { - "tag_name": "Totalizer_FM_Current_Day_Total_Gal", + "WTP1_Run_CMD": { + "tag_name": "WTP1_Run_CMD", "dim": 0, "alias": true, "instance_id": 53, - "symbol_address": 1168, - "symbol_object_address": 1077571048, + "symbol_address": 1199, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "Totalizer_FM_Last_Month_Gal": { - "tag_name": "Totalizer_FM_Last_Month_Gal", + "CMD_WTP1_Stop_PB": { + "tag_name": "CMD_WTP1_Stop_PB", "dim": 0, "alias": true, "instance_id": 54, - "symbol_address": 1172, - "symbol_object_address": 1077571048, + "symbol_address": 1200, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "Totalizer_FM_Current_Month_Gal": { - "tag_name": "Totalizer_FM_Current_Month_Gal", + "CMD_WTP1_Start_PB": { + "tag_name": "CMD_WTP1_Start_PB", "dim": 0, "alias": true, "instance_id": 55, - "symbol_address": 1176, - "symbol_object_address": 1077571048, + "symbol_address": 1201, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "Totalizer_FM_Yesterday_Total_BBLs": { - "tag_name": "Totalizer_FM_Yesterday_Total_BBLs", + "CMD_WTP2_Start_PB": { + "tag_name": "CMD_WTP2_Start_PB", "dim": 0, "alias": true, "instance_id": 56, - "symbol_address": 1180, - 
"symbol_object_address": 1077571048, + "symbol_address": 1202, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "Totalizer_FM_Current_Day_Total_BBLs": { - "tag_name": "Totalizer_FM_Current_Day_Total_BBLs", + "WTP2_Run_CMD": { + "tag_name": "WTP2_Run_CMD", "dim": 0, "alias": true, "instance_id": 57, - "symbol_address": 1184, - "symbol_object_address": 1077571048, + "symbol_address": 1203, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "Totalizer_FM_Last_Month_BBLs": { - "tag_name": "Totalizer_FM_Last_Month_BBLs", + "CMD_WTP2_Stop_PB": { + "tag_name": "CMD_WTP2_Stop_PB", "dim": 0, "alias": true, "instance_id": 58, - "symbol_address": 1188, - "symbol_object_address": 1077571048, + "symbol_address": 1204, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "Today_Total": { - "tag_name": "Today_Total", + "Sts_Either_WTP_Run": { + "tag_name": "Sts_Either_WTP_Run", "dim": 0, "alias": true, "instance_id": 59, - "symbol_address": 1192, - "symbol_object_address": 1077571048, + "symbol_address": 1205, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": 
"BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "Totalizer_FM_Current_Month_BBLs": { - "tag_name": "Totalizer_FM_Current_Month_BBLs", + "AL0_Air_Compressor_Lo_Discharge_PSI": { + "tag_name": "AL0_Air_Compressor_Lo_Discharge_PSI", "dim": 0, "alias": true, "instance_id": 60, - "symbol_address": 1196, - "symbol_object_address": 1077571048, + "symbol_address": 1206, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "Prev_Day_Total": { - "tag_name": "Prev_Day_Total", + "CMD_WTP1_SS_ClearFault": { + "tag_name": "CMD_WTP1_SS_ClearFault", "dim": 0, "alias": true, "instance_id": 61, - "symbol_address": 1200, - "symbol_object_address": 1077571048, + "symbol_address": 1207, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "Month_Total": { - "tag_name": "Month_Total", + "CMD_WTP1_SS_Run": { + "tag_name": "CMD_WTP1_SS_Run", "dim": 0, "alias": true, "instance_id": 62, - "symbol_address": 1204, - "symbol_object_address": 1077571048, + "symbol_address": 1208, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "Lifetime_Flow_Meter_BBLS": { - "tag_name": "Lifetime_Flow_Meter_BBLS", + "CMD_WTP2_SS_ClearFault": { + "tag_name": "CMD_WTP2_SS_ClearFault", "dim": 0, "alias": true, "instance_id": 63, - 
"symbol_address": 1208, - "symbol_object_address": 1077571048, + "symbol_address": 1209, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "Prev_Month_Total": { - "tag_name": "Prev_Month_Total", + "CMD_WTP2_SS_Run": { + "tag_name": "CMD_WTP2_SS_Run", "dim": 0, "alias": true, "instance_id": 64, - "symbol_address": 1212, - "symbol_object_address": 1077571048, + "symbol_address": 1210, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "Today_Total_Gallons": { - "tag_name": "Today_Total_Gallons", + "CMD_Lact_Run": { + "tag_name": "CMD_Lact_Run", "dim": 0, "alias": true, "instance_id": 65, - "symbol_address": 1216, - "symbol_object_address": 1077571048, + "symbol_address": 1211, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "Low_Flow_Cutoff": { - "tag_name": "Low_Flow_Cutoff", + "AL0_WTP1_SS_Faulted": { + "tag_name": "AL0_WTP1_SS_Faulted", "dim": 0, "alias": true, "instance_id": 66, - "symbol_address": 1220, - "symbol_object_address": 1077571048, + "symbol_address": 1212, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + 
"data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "Seconds_Per_Minute": { - "tag_name": "Seconds_Per_Minute", + "AL0_WTP2_SS_Faulted": { + "tag_name": "AL0_WTP2_SS_Faulted", "dim": 0, "alias": true, "instance_id": 67, - "symbol_address": 1224, - "symbol_object_address": 1077571048, + "symbol_address": 1213, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "Today_Total_BBLs": { - "tag_name": "Today_Total_BBLs", + "FBK_WTP1_SS_Running": { + "tag_name": "FBK_WTP1_SS_Running", "dim": 0, "alias": true, "instance_id": 68, - "symbol_address": 1228, - "symbol_object_address": 1077571048, + "symbol_address": 1214, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "Prev_Day_Total_Gals": { - "tag_name": "Prev_Day_Total_Gals", + "FBK_WTP2_SS_Running": { + "tag_name": "FBK_WTP2_SS_Running", "dim": 0, "alias": true, "instance_id": 69, - "symbol_address": 1232, - "symbol_object_address": 1077571048, + "symbol_address": 1215, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "Prev_Day_Total_BBLs": { - "tag_name": "Prev_Day_Total_BBLs", + "CMD_Manual_Lact_Run": { + "tag_name": "CMD_Manual_Lact_Run", "dim": 0, "alias": true, "instance_id": 70, - "symbol_address": 1236, - "symbol_object_address": 1077571048, + 
"symbol_address": 1216, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "Month_Total_Gallons": { - "tag_name": "Month_Total_Gallons", + "CMD_WTP1_PID_Auto": { + "tag_name": "CMD_WTP1_PID_Auto", "dim": 0, "alias": true, "instance_id": 71, - "symbol_address": 1240, - "symbol_object_address": 1077571048, + "symbol_address": 1217, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "Prev_Month_Total_Gallons": { - "tag_name": "Prev_Month_Total_Gallons", + "CMD_WTP2_PID_Auto": { + "tag_name": "CMD_WTP2_PID_Auto", "dim": 0, "alias": true, "instance_id": 72, - "symbol_address": 1244, - "symbol_object_address": 1077571048, + "symbol_address": 1218, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "Month_Total_Barrels": { - "tag_name": "Month_Total_Barrels", + "CMD_WTP1_PID_Manual": { + "tag_name": "CMD_WTP1_PID_Manual", "dim": 0, "alias": true, "instance_id": 73, - "symbol_address": 1248, - "symbol_object_address": 1077571048, + "symbol_address": 1219, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 
0, "tag_type": "atomic" }, - "Prev_Month_Total_Barrels": { - "tag_name": "Prev_Month_Total_Barrels", + "CMD_WTP2_PID_Manual": { + "tag_name": "CMD_WTP2_PID_Manual", "dim": 0, "alias": true, "instance_id": 74, - "symbol_address": 1252, - "symbol_object_address": 1077571048, + "symbol_address": 1220, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "Sec_Per_Min": { - "tag_name": "Sec_Per_Min", + "AL0_Lact_Unit_Faulted": { + "tag_name": "AL0_Lact_Unit_Faulted", "dim": 0, "alias": true, "instance_id": 75, - "symbol_address": 1256, - "symbol_object_address": 1077571048, + "symbol_address": 1221, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "MB_FlowRate": { - "tag_name": "MB_FlowRate", + "FBK_Lact_Unit_Running": { + "tag_name": "FBK_Lact_Unit_Running", "dim": 0, "alias": true, "instance_id": 76, - "symbol_address": 1260, - "symbol_object_address": 1077571048, + "symbol_address": 1222, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "Val_Flow_Rate": { - "tag_name": "Val_Flow_Rate", + "AL0_ST1_High_Level": { + "tag_name": "AL0_ST1_High_Level", "dim": 0, "alias": true, "instance_id": 77, - "symbol_address": 1264, - "symbol_object_address": 1077571048, + "symbol_address": 1223, + "symbol_object_address": 1613778336, 
"software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "MB_PrevDayTotal": { - "tag_name": "MB_PrevDayTotal", + "AL0_OT2_High_Level": { + "tag_name": "AL0_OT2_High_Level", "dim": 0, "alias": true, "instance_id": 78, - "symbol_address": 1268, - "symbol_object_address": 1077571048, + "symbol_address": 1224, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "MB_CurrentDayTotal": { - "tag_name": "MB_CurrentDayTotal", + "AL0_OT1_High_Level": { + "tag_name": "AL0_OT1_High_Level", "dim": 0, "alias": true, "instance_id": 79, - "symbol_address": 1272, - "symbol_object_address": 1077571048, + "symbol_address": 1225, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "MB_Total": { - "tag_name": "MB_Total", + "AL0_WT2_High_Level": { + "tag_name": "AL0_WT2_High_Level", "dim": 0, "alias": true, "instance_id": 80, - "symbol_address": 1276, - "symbol_object_address": 1077571048, + "symbol_address": 1226, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "MB_AccumlatedTotal": { - "tag_name": "MB_AccumlatedTotal", + 
"AL0_WT1_High_Level": { + "tag_name": "AL0_WT1_High_Level", "dim": 0, "alias": true, "instance_id": 81, - "symbol_address": 1280, - "symbol_object_address": 1077571048, + "symbol_address": 1227, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "BOOL", + "data_type_name": "BOOL", + "bit_position": 0, "tag_type": "atomic" }, - "TempMB_FlowRate": { - "tag_name": "TempMB_FlowRate", + "Current_Seconds": { + "tag_name": "Current_Seconds", "dim": 0, "alias": true, "instance_id": 82, - "symbol_address": 1284, - "symbol_object_address": 1077571048, + "symbol_address": 1230, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - "Flow_Rate_Holding": { - "tag_name": "Flow_Rate_Holding", + "Current_Minutes": { + "tag_name": "Current_Minutes", "dim": 0, "alias": true, "instance_id": 83, - "symbol_address": 1288, - "symbol_object_address": 1077571048, + "symbol_address": 1232, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - "Val_Lifetime_Total": { - "tag_name": "Val_Lifetime_Total", + "Current_Hour": { + "tag_name": "Current_Hour", "dim": 0, "alias": true, "instance_id": 84, - "symbol_address": 1292, - "symbol_object_address": 1077571048, + "symbol_address": 1234, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - 
"data_type_name": "REAL", + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - "Val_Water_Yest_Total": { - "tag_name": "Val_Water_Yest_Total", + "Current_Day": { + "tag_name": "Current_Day", "dim": 0, "alias": true, "instance_id": 85, - "symbol_address": 1296, - "symbol_object_address": 1077571048, + "symbol_address": 1236, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - "Val_Water_Today_Total": { - "tag_name": "Val_Water_Today_Total", + "Water_Contract_Hour": { + "tag_name": "Water_Contract_Hour", "dim": 0, "alias": true, "instance_id": 86, - "symbol_address": 1300, - "symbol_object_address": 1077571048, + "symbol_address": 1238, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - "Val_Water_LastMonth_Total": { - "tag_name": "Val_Water_LastMonth_Total", + "Oil_Contract_Hour": { + "tag_name": "Oil_Contract_Hour", "dim": 0, "alias": true, "instance_id": 87, - "symbol_address": 1304, - "symbol_object_address": 1077571048, + "symbol_address": 1240, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "REAL", - "data_type_name": "REAL", + "data_type": "INT", + "data_type_name": "INT", "tag_type": "atomic" }, - "Val_Water_Current_Month_Total": { - "tag_name": "Val_Water_Current_Month_Total", + "Raw_WTP1_Discharge_PSI": { + "tag_name": "Raw_WTP1_Discharge_PSI", "dim": 0, "alias": true, "instance_id": 88, - "symbol_address": 1308, - "symbol_object_address": 1077571048, + 
"symbol_address": 1248, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, @@ -1619,15 +1639,15 @@ "data_type_name": "REAL", "tag_type": "atomic" }, - "Val_Oil_Yest_Total": { - "tag_name": "Val_Oil_Yest_Total", + "Raw_WTP2_Discharge_PSI": { + "tag_name": "Raw_WTP2_Discharge_PSI", "dim": 0, "alias": true, "instance_id": 89, - "symbol_address": 1312, - "symbol_object_address": 1077571048, + "symbol_address": 1252, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, @@ -1637,15 +1657,15 @@ "data_type_name": "REAL", "tag_type": "atomic" }, - "Val_Oil_Today_Total": { - "tag_name": "Val_Oil_Today_Total", + "CMD_Oil_Working_Tank_Level": { + "tag_name": "CMD_Oil_Working_Tank_Level", "dim": 0, "alias": true, "instance_id": 90, - "symbol_address": 1316, - "symbol_object_address": 1077571048, + "symbol_address": 1256, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, @@ -1655,15 +1675,15 @@ "data_type_name": "REAL", "tag_type": "atomic" }, - "Val_Oil_LastMonth_Total": { - "tag_name": "Val_Oil_LastMonth_Total", + "CFG_WTP2_InRawMax": { + "tag_name": "CFG_WTP2_InRawMax", "dim": 0, "alias": true, "instance_id": 91, - "symbol_address": 1320, - "symbol_object_address": 1077571048, + "symbol_address": 1260, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, @@ -1673,15 +1693,15 @@ "data_type_name": "REAL", "tag_type": "atomic" }, - "Val_Oil_CurrentMonth": { - "tag_name": "Val_Oil_CurrentMonth", + "CFG_WTP2_InRawMin": { + "tag_name": "CFG_WTP2_InRawMin", "dim": 0, "alias": true, "instance_id": 92, - "symbol_address": 1324, - "symbol_object_address": 1077571048, + "symbol_address": 1264, + 
"symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, @@ -1691,15 +1711,15 @@ "data_type_name": "REAL", "tag_type": "atomic" }, - "Temp_Lifetime_Holding": { - "tag_name": "Temp_Lifetime_Holding", + "CFG_WTP2_InEUMax": { + "tag_name": "CFG_WTP2_InEUMax", "dim": 0, "alias": true, "instance_id": 93, - "symbol_address": 1328, - "symbol_object_address": 1077571048, + "symbol_address": 1268, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, @@ -1709,22 +1729,2812 @@ "data_type_name": "REAL", "tag_type": "atomic" }, - "Sample_Period": { - "tag_name": "Sample_Period", + "CFG_WTP2_InEUMin": { + "tag_name": "CFG_WTP2_InEUMin", "dim": 0, "alias": true, "instance_id": 94, - "symbol_address": 1540, - "symbol_object_address": 1077571048, + "symbol_address": 1272, + "symbol_object_address": 1613778336, "software_control": 0, - "external_access": "Unknown", + "external_access": "Read/Write", "dimensions": [ 0, 0, 0 ], - "data_type": "UDINT", - "data_type_name": "UDINT", + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_WTP2_Discharge_PSI_Scaled": { + "tag_name": "Val_WTP2_Discharge_PSI_Scaled", + "dim": 0, + "alias": true, + "instance_id": 95, + "symbol_address": 1276, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "CFG_WTP1_InEUMin": { + "tag_name": "CFG_WTP1_InEUMin", + "dim": 0, + "alias": true, + "instance_id": 96, + "symbol_address": 1280, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "CFG_WTP1_InEUMax": { + 
"tag_name": "CFG_WTP1_InEUMax", + "dim": 0, + "alias": true, + "instance_id": 97, + "symbol_address": 1284, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "CFG_WTP1_InRawMin": { + "tag_name": "CFG_WTP1_InRawMin", + "dim": 0, + "alias": true, + "instance_id": 98, + "symbol_address": 1288, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "CFG_WTP1_InRawMax": { + "tag_name": "CFG_WTP1_InRawMax", + "dim": 0, + "alias": true, + "instance_id": 99, + "symbol_address": 1292, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_WTP1_Discharge_PSI_Scaled": { + "tag_name": "Val_WTP1_Discharge_PSI_Scaled", + "dim": 0, + "alias": true, + "instance_id": 100, + "symbol_address": 1296, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_WTP1_FR": { + "tag_name": "Val_WTP1_FR", + "dim": 0, + "alias": true, + "instance_id": 101, + "symbol_address": 1300, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_WTP1_T1": { + "tag_name": "Val_WTP1_T1", + "dim": 0, + "alias": true, + "instance_id": 102, + "symbol_address": 1304, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + 
], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_WT1_PV": { + "tag_name": "Val_WT1_PV", + "dim": 0, + "alias": true, + "instance_id": 103, + "symbol_address": 1308, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_WT1_SV": { + "tag_name": "Val_WT1_SV", + "dim": 0, + "alias": true, + "instance_id": 104, + "symbol_address": 1312, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_WTP1_Monthly_Total": { + "tag_name": "Val_WTP1_Monthly_Total", + "dim": 0, + "alias": true, + "instance_id": 105, + "symbol_address": 1316, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_WTP1_LastMonth_Total": { + "tag_name": "Val_WTP1_LastMonth_Total", + "dim": 0, + "alias": true, + "instance_id": 106, + "symbol_address": 1320, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_WTP1_Todays_Total": { + "tag_name": "Val_WTP1_Todays_Total", + "dim": 0, + "alias": true, + "instance_id": 107, + "symbol_address": 1324, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_WTP1_Yest_Total": { + "tag_name": "Val_WTP1_Yest_Total", + "dim": 0, + "alias": true, + "instance_id": 108, + "symbol_address": 1328, + 
"symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_1_Oil_FR": { + "tag_name": "Val_Vessel_1_Oil_FR", + "dim": 0, + "alias": true, + "instance_id": 109, + "symbol_address": 1332, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_1_Oil_T1": { + "tag_name": "Val_Vessel_1_Oil_T1", + "dim": 0, + "alias": true, + "instance_id": 110, + "symbol_address": 1336, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_1_Water_FR": { + "tag_name": "Val_Vessel_1_Water_FR", + "dim": 0, + "alias": true, + "instance_id": 111, + "symbol_address": 1340, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_1_Water_T1": { + "tag_name": "Val_Vessel_1_Water_T1", + "dim": 0, + "alias": true, + "instance_id": 112, + "symbol_address": 1344, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_2_Oil_FR": { + "tag_name": "Val_Vessel_2_Oil_FR", + "dim": 0, + "alias": true, + "instance_id": 113, + "symbol_address": 1348, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + 
}, + "Val_Vessel_2_Oil_T1": { + "tag_name": "Val_Vessel_2_Oil_T1", + "dim": 0, + "alias": true, + "instance_id": 114, + "symbol_address": 1352, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_2_Water_FR": { + "tag_name": "Val_Vessel_2_Water_FR", + "dim": 0, + "alias": true, + "instance_id": 115, + "symbol_address": 1356, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_2_Water_T1": { + "tag_name": "Val_Vessel_2_Water_T1", + "dim": 0, + "alias": true, + "instance_id": 116, + "symbol_address": 1360, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_3_Oil_FR": { + "tag_name": "Val_Vessel_3_Oil_FR", + "dim": 0, + "alias": true, + "instance_id": 117, + "symbol_address": 1364, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_3_Oil_T1": { + "tag_name": "Val_Vessel_3_Oil_T1", + "dim": 0, + "alias": true, + "instance_id": 118, + "symbol_address": 1368, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_3_Water_T1": { + "tag_name": "Val_Vessel_3_Water_T1", + "dim": 0, + "alias": true, + "instance_id": 119, + "symbol_address": 1372, + "symbol_object_address": 1613778336, + "software_control": 0, 
+ "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_3_Water_FR": { + "tag_name": "Val_Vessel_3_Water_FR", + "dim": 0, + "alias": true, + "instance_id": 120, + "symbol_address": 1376, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_1_Oil_Month_Total": { + "tag_name": "Val_Vessel_1_Oil_Month_Total", + "dim": 0, + "alias": true, + "instance_id": 121, + "symbol_address": 1380, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_1_Oil_LastMonth_Total": { + "tag_name": "Val_Vessel_1_Oil_LastMonth_Total", + "dim": 0, + "alias": true, + "instance_id": 122, + "symbol_address": 1384, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_1_Oil_Todays_Total": { + "tag_name": "Val_Vessel_1_Oil_Todays_Total", + "dim": 0, + "alias": true, + "instance_id": 123, + "symbol_address": 1388, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_1_Oil_Yest_Total": { + "tag_name": "Val_Vessel_1_Oil_Yest_Total", + "dim": 0, + "alias": true, + "instance_id": 124, + "symbol_address": 1392, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + 
"tag_type": "atomic" + }, + "Val_Vessel_1_Water_Month_Total": { + "tag_name": "Val_Vessel_1_Water_Month_Total", + "dim": 0, + "alias": true, + "instance_id": 125, + "symbol_address": 1396, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_1_Water_LastMonth_Total": { + "tag_name": "Val_Vessel_1_Water_LastMonth_Total", + "dim": 0, + "alias": true, + "instance_id": 126, + "symbol_address": 1400, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_1_Water_Todays_Total": { + "tag_name": "Val_Vessel_1_Water_Todays_Total", + "dim": 0, + "alias": true, + "instance_id": 127, + "symbol_address": 1404, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_1_Water_Yest_Total": { + "tag_name": "Val_Vessel_1_Water_Yest_Total", + "dim": 0, + "alias": true, + "instance_id": 128, + "symbol_address": 1408, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_2_Oil_Month_Total": { + "tag_name": "Val_Vessel_2_Oil_Month_Total", + "dim": 0, + "alias": true, + "instance_id": 129, + "symbol_address": 1412, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_2_Oil_LastMonth_Total": { + "tag_name": 
"Val_Vessel_2_Oil_LastMonth_Total", + "dim": 0, + "alias": true, + "instance_id": 130, + "symbol_address": 1416, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_2_Oil_Todays_Total": { + "tag_name": "Val_Vessel_2_Oil_Todays_Total", + "dim": 0, + "alias": true, + "instance_id": 131, + "symbol_address": 1420, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_2_Oil_Yest_Total": { + "tag_name": "Val_Vessel_2_Oil_Yest_Total", + "dim": 0, + "alias": true, + "instance_id": 132, + "symbol_address": 1424, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_2_Water_Month_Total": { + "tag_name": "Val_Vessel_2_Water_Month_Total", + "dim": 0, + "alias": true, + "instance_id": 133, + "symbol_address": 1428, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_2_Water_LastMonth_Total": { + "tag_name": "Val_Vessel_2_Water_LastMonth_Total", + "dim": 0, + "alias": true, + "instance_id": 134, + "symbol_address": 1432, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_2_Water_Todays_Total": { + "tag_name": "Val_Vessel_2_Water_Todays_Total", + "dim": 0, + "alias": true, + "instance_id": 135, + "symbol_address": 
1436, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_2_Water_Yest_Total": { + "tag_name": "Val_Vessel_2_Water_Yest_Total", + "dim": 0, + "alias": true, + "instance_id": 136, + "symbol_address": 1440, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_3_Oil_Month_Total": { + "tag_name": "Val_Vessel_3_Oil_Month_Total", + "dim": 0, + "alias": true, + "instance_id": 137, + "symbol_address": 1444, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_3_Oil_LastMonth_Total": { + "tag_name": "Val_Vessel_3_Oil_LastMonth_Total", + "dim": 0, + "alias": true, + "instance_id": 138, + "symbol_address": 1448, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_3_Oil_Todays_Total": { + "tag_name": "Val_Vessel_3_Oil_Todays_Total", + "dim": 0, + "alias": true, + "instance_id": 139, + "symbol_address": 1452, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_3_Oil_Yest_Total": { + "tag_name": "Val_Vessel_3_Oil_Yest_Total", + "dim": 0, + "alias": true, + "instance_id": 140, + "symbol_address": 1456, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + 
"dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_3_Water_Month_Total": { + "tag_name": "Val_Vessel_3_Water_Month_Total", + "dim": 0, + "alias": true, + "instance_id": 141, + "symbol_address": 1460, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_3_Water_LastMonth_Total": { + "tag_name": "Val_Vessel_3_Water_LastMonth_Total", + "dim": 0, + "alias": true, + "instance_id": 142, + "symbol_address": 1464, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_3_Water_Todays_Total": { + "tag_name": "Val_Vessel_3_Water_Todays_Total", + "dim": 0, + "alias": true, + "instance_id": 143, + "symbol_address": 1468, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Vessel_3_Water_Yest_Total": { + "tag_name": "Val_Vessel_3_Water_Yest_Total", + "dim": 0, + "alias": true, + "instance_id": 144, + "symbol_address": 1472, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_ST1_SV": { + "tag_name": "Val_ST1_SV", + "dim": 0, + "alias": true, + "instance_id": 145, + "symbol_address": 1476, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_ST1_PV": { + 
"tag_name": "Val_ST1_PV", + "dim": 0, + "alias": true, + "instance_id": 146, + "symbol_address": 1480, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_OT2_SV": { + "tag_name": "Val_OT2_SV", + "dim": 0, + "alias": true, + "instance_id": 147, + "symbol_address": 1484, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_OT2_PV": { + "tag_name": "Val_OT2_PV", + "dim": 0, + "alias": true, + "instance_id": 148, + "symbol_address": 1488, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_OT1_SV": { + "tag_name": "Val_OT1_SV", + "dim": 0, + "alias": true, + "instance_id": 149, + "symbol_address": 1492, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_OT1_PV": { + "tag_name": "Val_OT1_PV", + "dim": 0, + "alias": true, + "instance_id": 150, + "symbol_address": 1496, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_WT2_SV": { + "tag_name": "Val_WT2_SV", + "dim": 0, + "alias": true, + "instance_id": 151, + "symbol_address": 1500, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": 
"atomic" + }, + "Val_WT2_PV": { + "tag_name": "Val_WT2_PV", + "dim": 0, + "alias": true, + "instance_id": 152, + "symbol_address": 1504, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Working_Oil_Tank_Level": { + "tag_name": "Working_Oil_Tank_Level", + "dim": 0, + "alias": true, + "instance_id": 153, + "symbol_address": 1508, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Working_Water_Tank_Level": { + "tag_name": "Working_Water_Tank_Level", + "dim": 0, + "alias": true, + "instance_id": 154, + "symbol_address": 1512, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "SPT_WTP1_Start": { + "tag_name": "SPT_WTP1_Start", + "dim": 0, + "alias": true, + "instance_id": 155, + "symbol_address": 1516, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "SPT_WTP1_Stop": { + "tag_name": "SPT_WTP1_Stop", + "dim": 0, + "alias": true, + "instance_id": 156, + "symbol_address": 1520, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "SPT_WTP1_Hi_Discharge": { + "tag_name": "SPT_WTP1_Hi_Discharge", + "dim": 0, + "alias": true, + "instance_id": 157, + "symbol_address": 1524, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": 
"Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "SPT_WTP2_Start": { + "tag_name": "SPT_WTP2_Start", + "dim": 0, + "alias": true, + "instance_id": 158, + "symbol_address": 1528, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "SPT_WTP2_Stop": { + "tag_name": "SPT_WTP2_Stop", + "dim": 0, + "alias": true, + "instance_id": 159, + "symbol_address": 1532, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "SPT_WTP2_Hi_Discharge": { + "tag_name": "SPT_WTP2_Hi_Discharge", + "dim": 0, + "alias": true, + "instance_id": 160, + "symbol_address": 1536, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Sales_SP": { + "tag_name": "Val_Gas_Sales_SP", + "dim": 0, + "alias": true, + "instance_id": 161, + "symbol_address": 1540, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Sales_DP": { + "tag_name": "Val_Gas_Sales_DP", + "dim": 0, + "alias": true, + "instance_id": 162, + "symbol_address": 1544, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Sales_Temp": { + "tag_name": "Val_Gas_Sales_Temp", + "dim": 0, + "alias": true, + "instance_id": 163, + 
"symbol_address": 1548, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Sales_FR": { + "tag_name": "Val_Gas_Sales_FR", + "dim": 0, + "alias": true, + "instance_id": 164, + "symbol_address": 1552, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Sales_Today": { + "tag_name": "Val_Gas_Sales_Today", + "dim": 0, + "alias": true, + "instance_id": 165, + "symbol_address": 1556, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Sales_Yesterday": { + "tag_name": "Val_Gas_Sales_Yesterday", + "dim": 0, + "alias": true, + "instance_id": 166, + "symbol_address": 1560, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Sales_Lifetime": { + "tag_name": "Val_Gas_Sales_Lifetime", + "dim": 0, + "alias": true, + "instance_id": 167, + "symbol_address": 1564, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_3_Lifetime": { + "tag_name": "Val_Gas_Vessel_3_Lifetime", + "dim": 0, + "alias": true, + "instance_id": 168, + "symbol_address": 1568, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + 
"data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_3_Yesterday": { + "tag_name": "Val_Gas_Vessel_3_Yesterday", + "dim": 0, + "alias": true, + "instance_id": 169, + "symbol_address": 1572, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_3_Today": { + "tag_name": "Val_Gas_Vessel_3_Today", + "dim": 0, + "alias": true, + "instance_id": 170, + "symbol_address": 1576, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_3_FR": { + "tag_name": "Val_Gas_Vessel_3_FR", + "dim": 0, + "alias": true, + "instance_id": 171, + "symbol_address": 1580, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_3_Temp": { + "tag_name": "Val_Gas_Vessel_3_Temp", + "dim": 0, + "alias": true, + "instance_id": 172, + "symbol_address": 1584, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_3_DP": { + "tag_name": "Val_Gas_Vessel_3_DP", + "dim": 0, + "alias": true, + "instance_id": 173, + "symbol_address": 1588, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_3_SP": { + "tag_name": "Val_Gas_Vessel_3_SP", + "dim": 0, + "alias": true, + "instance_id": 174, + "symbol_address": 1592, 
+ "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_2_Lifetime": { + "tag_name": "Val_Gas_Vessel_2_Lifetime", + "dim": 0, + "alias": true, + "instance_id": 175, + "symbol_address": 1596, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_2_Yesterday": { + "tag_name": "Val_Gas_Vessel_2_Yesterday", + "dim": 0, + "alias": true, + "instance_id": 176, + "symbol_address": 1600, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_2_Today": { + "tag_name": "Val_Gas_Vessel_2_Today", + "dim": 0, + "alias": true, + "instance_id": 177, + "symbol_address": 1604, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_2_FR": { + "tag_name": "Val_Gas_Vessel_2_FR", + "dim": 0, + "alias": true, + "instance_id": 178, + "symbol_address": 1608, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_2_Temp": { + "tag_name": "Val_Gas_Vessel_2_Temp", + "dim": 0, + "alias": true, + "instance_id": 179, + "symbol_address": 1612, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": 
"REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_2_DP": { + "tag_name": "Val_Gas_Vessel_2_DP", + "dim": 0, + "alias": true, + "instance_id": 180, + "symbol_address": 1616, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_2_SP": { + "tag_name": "Val_Gas_Vessel_2_SP", + "dim": 0, + "alias": true, + "instance_id": 181, + "symbol_address": 1620, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_1_Lifetime": { + "tag_name": "Val_Gas_Vessel_1_Lifetime", + "dim": 0, + "alias": true, + "instance_id": 182, + "symbol_address": 1624, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_1_Yesterday": { + "tag_name": "Val_Gas_Vessel_1_Yesterday", + "dim": 0, + "alias": true, + "instance_id": 183, + "symbol_address": 1628, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_1_Today": { + "tag_name": "Val_Gas_Vessel_1_Today", + "dim": 0, + "alias": true, + "instance_id": 184, + "symbol_address": 1632, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_1_FR": { + "tag_name": "Val_Gas_Vessel_1_FR", + "dim": 0, + "alias": true, + "instance_id": 185, + "symbol_address": 1636, + 
"symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_1_Temp": { + "tag_name": "Val_Gas_Vessel_1_Temp", + "dim": 0, + "alias": true, + "instance_id": 186, + "symbol_address": 1640, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_1_DP": { + "tag_name": "Val_Gas_Vessel_1_DP", + "dim": 0, + "alias": true, + "instance_id": 187, + "symbol_address": 1644, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_1_SP": { + "tag_name": "Val_Gas_Vessel_1_SP", + "dim": 0, + "alias": true, + "instance_id": 188, + "symbol_address": 1648, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Flare_Lifetime": { + "tag_name": "Val_Gas_Flare_Lifetime", + "dim": 0, + "alias": true, + "instance_id": 189, + "symbol_address": 1652, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Flare_Yesterday": { + "tag_name": "Val_Gas_Flare_Yesterday", + "dim": 0, + "alias": true, + "instance_id": 190, + "symbol_address": 1656, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": 
"atomic" + }, + "Val_Gas_Flare_Today": { + "tag_name": "Val_Gas_Flare_Today", + "dim": 0, + "alias": true, + "instance_id": 191, + "symbol_address": 1660, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Flare_FR": { + "tag_name": "Val_Gas_Flare_FR", + "dim": 0, + "alias": true, + "instance_id": 192, + "symbol_address": 1664, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Flare_Temp": { + "tag_name": "Val_Gas_Flare_Temp", + "dim": 0, + "alias": true, + "instance_id": 193, + "symbol_address": 1668, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Flare_DP": { + "tag_name": "Val_Gas_Flare_DP", + "dim": 0, + "alias": true, + "instance_id": 194, + "symbol_address": 1672, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Flare_SP": { + "tag_name": "Val_Gas_Flare_SP", + "dim": 0, + "alias": true, + "instance_id": 195, + "symbol_address": 1676, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Lact_Tank_Level_Scaled": { + "tag_name": "Val_Lact_Tank_Level_Scaled", + "dim": 0, + "alias": true, + "instance_id": 196, + "symbol_address": 1680, + "symbol_object_address": 1613778336, + "software_control": 0, + 
"external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "CFG_Lact_Tank_Level_InEUMin": { + "tag_name": "CFG_Lact_Tank_Level_InEUMin", + "dim": 0, + "alias": true, + "instance_id": 197, + "symbol_address": 1684, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "CFG_Lact_Tank_Level_InEUMax": { + "tag_name": "CFG_Lact_Tank_Level_InEUMax", + "dim": 0, + "alias": true, + "instance_id": 198, + "symbol_address": 1688, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "CFG_Lact_Tank_Level_InRawMin": { + "tag_name": "CFG_Lact_Tank_Level_InRawMin", + "dim": 0, + "alias": true, + "instance_id": 199, + "symbol_address": 1692, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "CFG_Lact_Tank_Level_InRawMax": { + "tag_name": "CFG_Lact_Tank_Level_InRawMax", + "dim": 0, + "alias": true, + "instance_id": 200, + "symbol_address": 1696, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Raw_Air_Comp_Disch_PSI": { + "tag_name": "Raw_Air_Comp_Disch_PSI", + "dim": 0, + "alias": true, + "instance_id": 201, + "symbol_address": 1700, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + 
}, + "Val_Air_Comp_Disch_PSI": { + "tag_name": "Val_Air_Comp_Disch_PSI", + "dim": 0, + "alias": true, + "instance_id": 202, + "symbol_address": 1704, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "SPT_Air_Comp_Disch_Lo": { + "tag_name": "SPT_Air_Comp_Disch_Lo", + "dim": 0, + "alias": true, + "instance_id": 203, + "symbol_address": 1708, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_WTP2_FR": { + "tag_name": "Val_WTP2_FR", + "dim": 0, + "alias": true, + "instance_id": 204, + "symbol_address": 1712, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_WTP2_T1": { + "tag_name": "Val_WTP2_T1", + "dim": 0, + "alias": true, + "instance_id": 205, + "symbol_address": 1716, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_WTP2_Yest_Total": { + "tag_name": "Val_WTP2_Yest_Total", + "dim": 0, + "alias": true, + "instance_id": 206, + "symbol_address": 1720, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_WTP2_Todays_Total": { + "tag_name": "Val_WTP2_Todays_Total", + "dim": 0, + "alias": true, + "instance_id": 207, + "symbol_address": 1724, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": 
"Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_WTP2_LastMonth_Total": { + "tag_name": "Val_WTP2_LastMonth_Total", + "dim": 0, + "alias": true, + "instance_id": 208, + "symbol_address": 1728, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_WTP2_Monthly_Total": { + "tag_name": "Val_WTP2_Monthly_Total", + "dim": 0, + "alias": true, + "instance_id": 209, + "symbol_address": 1732, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "WTP1_Control_Valve_POS": { + "tag_name": "WTP1_Control_Valve_POS", + "dim": 0, + "alias": true, + "instance_id": 210, + "symbol_address": 1736, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "WTP2_Control_Valve_POS": { + "tag_name": "WTP2_Control_Valve_POS", + "dim": 0, + "alias": true, + "instance_id": 211, + "symbol_address": 1740, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Raw_WTP2_Valve_POS": { + "tag_name": "Raw_WTP2_Valve_POS", + "dim": 0, + "alias": true, + "instance_id": 212, + "symbol_address": 1744, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Raw_WTP1_Valve_POS": { + "tag_name": "Raw_WTP1_Valve_POS", + 
"dim": 0, + "alias": true, + "instance_id": 213, + "symbol_address": 1748, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "SPT_Lact_Run": { + "tag_name": "SPT_Lact_Run", + "dim": 0, + "alias": true, + "instance_id": 214, + "symbol_address": 1752, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "SPT_Lact_Stop": { + "tag_name": "SPT_Lact_Stop", + "dim": 0, + "alias": true, + "instance_id": 215, + "symbol_address": 1756, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_2_Density": { + "tag_name": "Val_Gas_Vessel_2_Density", + "dim": 0, + "alias": true, + "instance_id": 216, + "symbol_address": 1760, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_3_Density": { + "tag_name": "Val_Gas_Vessel_3_Density", + "dim": 0, + "alias": true, + "instance_id": 217, + "symbol_address": 1764, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "SPT_WTP1_PID_Manual": { + "tag_name": "SPT_WTP1_PID_Manual", + "dim": 0, + "alias": true, + "instance_id": 218, + "symbol_address": 1768, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": 
"REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "SPT_WTP1_Auto": { + "tag_name": "SPT_WTP1_Auto", + "dim": 0, + "alias": true, + "instance_id": 219, + "symbol_address": 1772, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "SPT_WTP2_PID_Manual": { + "tag_name": "SPT_WTP2_PID_Manual", + "dim": 0, + "alias": true, + "instance_id": 220, + "symbol_address": 1776, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "SPT_WTP2_Auto": { + "tag_name": "SPT_WTP2_Auto", + "dim": 0, + "alias": true, + "instance_id": 221, + "symbol_address": 1780, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_WTP1_Valve_FBK": { + "tag_name": "Val_WTP1_Valve_FBK", + "dim": 0, + "alias": true, + "instance_id": 222, + "symbol_address": 1784, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_WTP2_Valve_FBK": { + "tag_name": "Val_WTP2_Valve_FBK", + "dim": 0, + "alias": true, + "instance_id": 223, + "symbol_address": 1788, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Lact_Meter_Temp": { + "tag_name": "Val_Lact_Meter_Temp", + "dim": 0, + "alias": true, + "instance_id": 224, + "symbol_address": 1792, + "symbol_object_address": 1613778336, + 
"software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Lact_Meter_Density": { + "tag_name": "Val_Lact_Meter_Density", + "dim": 0, + "alias": true, + "instance_id": 225, + "symbol_address": 1796, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Lact_Meter_Lifetime": { + "tag_name": "Val_Lact_Meter_Lifetime", + "dim": 0, + "alias": true, + "instance_id": 226, + "symbol_address": 1800, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Lact_Meter_FR": { + "tag_name": "Val_Lact_Meter_FR", + "dim": 0, + "alias": true, + "instance_id": 227, + "symbol_address": 1804, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Lact_Meter_Todays": { + "tag_name": "Val_Lact_Meter_Todays", + "dim": 0, + "alias": true, + "instance_id": 228, + "symbol_address": 1808, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Lact_Meter_Yest": { + "tag_name": "Val_Lact_Meter_Yest", + "dim": 0, + "alias": true, + "instance_id": 229, + "symbol_address": 1812, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "SPT_ST1_Clear_High_Alarm": { 
+ "tag_name": "SPT_ST1_Clear_High_Alarm", + "dim": 0, + "alias": true, + "instance_id": 230, + "symbol_address": 1816, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "SPT_ST1_High_Alarm": { + "tag_name": "SPT_ST1_High_Alarm", + "dim": 0, + "alias": true, + "instance_id": 231, + "symbol_address": 1820, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "SPT_OT2_Clear_High_Alarm": { + "tag_name": "SPT_OT2_Clear_High_Alarm", + "dim": 0, + "alias": true, + "instance_id": 232, + "symbol_address": 1824, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "SPT_OT2_High_Alarm": { + "tag_name": "SPT_OT2_High_Alarm", + "dim": 0, + "alias": true, + "instance_id": 233, + "symbol_address": 1828, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "SPT_OT1_Clear_High_Alarm": { + "tag_name": "SPT_OT1_Clear_High_Alarm", + "dim": 0, + "alias": true, + "instance_id": 234, + "symbol_address": 1832, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "SPT_OT1_High_Alarm": { + "tag_name": "SPT_OT1_High_Alarm", + "dim": 0, + "alias": true, + "instance_id": 235, + "symbol_address": 1836, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": 
"Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "SPT_WT2_Clear_High_Alarm": { + "tag_name": "SPT_WT2_Clear_High_Alarm", + "dim": 0, + "alias": true, + "instance_id": 236, + "symbol_address": 1840, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "SPT_WT2_High_Alarm": { + "tag_name": "SPT_WT2_High_Alarm", + "dim": 0, + "alias": true, + "instance_id": 237, + "symbol_address": 1844, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "SPT_WT1_Clear_High_Alarm": { + "tag_name": "SPT_WT1_Clear_High_Alarm", + "dim": 0, + "alias": true, + "instance_id": 238, + "symbol_address": 1848, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "SPT_WT1_High_Alarm": { + "tag_name": "SPT_WT1_High_Alarm", + "dim": 0, + "alias": true, + "instance_id": 239, + "symbol_address": 1852, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_2_LastMonth": { + "tag_name": "Val_Gas_Vessel_2_LastMonth", + "dim": 0, + "alias": true, + "instance_id": 240, + "symbol_address": 1856, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_2_Monthly": { + "tag_name": 
"Val_Gas_Vessel_2_Monthly", + "dim": 0, + "alias": true, + "instance_id": 241, + "symbol_address": 1860, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_3_LastMonth": { + "tag_name": "Val_Gas_Vessel_3_LastMonth", + "dim": 0, + "alias": true, + "instance_id": 242, + "symbol_address": 1864, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Val_Gas_Vessel_3_Monthly": { + "tag_name": "Val_Gas_Vessel_3_Monthly", + "dim": 0, + "alias": true, + "instance_id": 243, + "symbol_address": 1868, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", + "tag_type": "atomic" + }, + "Oil_Tank_Selector": { + "tag_name": "Oil_Tank_Selector", + "dim": 0, + "alias": true, + "instance_id": 244, + "symbol_address": 1924, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "DINT", + "data_type_name": "DINT", + "tag_type": "atomic" + }, + "Water_Tank_Selector": { + "tag_name": "Water_Tank_Selector", + "dim": 0, + "alias": true, + "instance_id": 245, + "symbol_address": 1928, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "DINT", + "data_type_name": "DINT", + "tag_type": "atomic" + }, + "zero": { + "tag_name": "zero", + "dim": 0, + "alias": true, + "instance_id": 246, + "symbol_address": 1932, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 
0, + 0 + ], + "data_type": "DINT", + "data_type_name": "DINT", + "tag_type": "atomic" + }, + "WTP1_Alarm": { + "tag_name": "WTP1_Alarm", + "dim": 0, + "alias": true, + "instance_id": 247, + "symbol_address": 1936, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "DINT", + "data_type_name": "DINT", + "tag_type": "atomic" + }, + "WTP2_Alarm": { + "tag_name": "WTP2_Alarm", + "dim": 0, + "alias": true, + "instance_id": 248, + "symbol_address": 1940, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 0, + 0, + 0 + ], + "data_type": "DINT", + "data_type_name": "DINT", + "tag_type": "atomic" + }, + "Placeholder": { + "tag_name": "Placeholder", + "dim": 1, + "alias": true, + "instance_id": 249, + "symbol_address": 2248, + "symbol_object_address": 1613778336, + "software_control": 0, + "external_access": "Read/Write", + "dimensions": [ + 100, + 0, + 0 + ], + "data_type": "REAL", + "data_type_name": "REAL", "tag_type": "atomic" } } \ No newline at end of file